In [ ]:
import os
from zipfile import ZipFile
import pickle
import numpy as np
import cv2
import matplotlib.pyplot as plt
from matplotlib import patches
import pandas as pd
from imgaug import augmenters as iaa
import glob
import imageio
import re
from imgaug.augmentables.bbs import BoundingBoxesOnImage
import seaborn as sns

import click

import visualkeras

from collections import defaultdict

from sklearn.metrics import classification_report, precision_score, recall_score, f1_score, confusion_matrix

import tensorflow as tf
from keras import backend, Model, Input, activations
from keras.models import Sequential
from keras.layers import Conv2D, MaxPooling2D, Flatten, Dense, Dropout, AveragePooling2D, BatchNormalization, MaxPool2D, ZeroPadding2D, Activation, Add, GlobalAveragePooling2D
from keras.losses import categorical_crossentropy, CategoricalCrossentropy
from keras.utils import np_utils, load_img, img_to_array
from keras.callbacks import ModelCheckpoint, ReduceLROnPlateau, EarlyStopping
from keras.optimizers import Adam
from keras.regularizers import l2
from keras.applications import MobileNetV2
from keras.applications.mobilenet_v2 import preprocess_input
from keras.preprocessing.image import ImageDataGenerator

from models.research.object_detection.utils import ops as utils_ops
from models.research.object_detection.utils import label_map_util
from models.research.object_detection.utils import visualization_utils as vis_util

import pathlib
from IPython.display import display
from PIL import Image

# patch tf1 into `utils.ops`
utils_ops.tensorflow = tf.compat.v1

# Patch the location of gfile
tf.gfile = tf.io.gfile


PATH_TO_LABELS = 'models/research/object_detection/data/mscoco_label_map.pbtxt'
category_index = label_map_util.create_category_index_from_labelmap(PATH_TO_LABELS, use_display_name=True)

from sklearn.preprocessing import LabelEncoder, LabelBinarizer
le=LabelEncoder()

lb=LabelBinarizer()

import warnings
warnings.filterwarnings("ignore")

plt.grid(None)
plt.axis('off')
random_state=42
%matplotlib inline

model_list=list()
# !git clone https://github.com/tensorflow/models
2023-06-22 13:19:13.462881: I tensorflow/core/platform/cpu_feature_guard.cc:182] This TensorFlow binary is optimized to use available CPU instructions in performance-critical operations.
To enable the following instructions: AVX2 FMA, in other operations, rebuild TensorFlow with the appropriate compiler flags.

Milestone 1¶

Step 1: Import the data.¶

Step 2: Map training and testing images to its classes.¶

Step 3: Map training and testing images to its annotations.¶

In [ ]:
ZipFile('Car Images.zip').extractall()
ZipFile('Annotations.zip').extractall()
In [ ]:
car_name_df = pd.read_csv('Car names and make.csv', names=['Car Name and Make'])
car_name_df.head()
Out[ ]:
Car Name and Make
0 AM General Hummer SUV 2000
1 Acura RL Sedan 2012
2 Acura TL Sedan 2012
3 Acura TL Type-S 2008
4 Acura TSX Sedan 2012
In [ ]:
car_name_df.shape
Out[ ]:
(196, 1)
In [ ]:
annot_train_df = pd.read_csv('Annotations/Train Annotations.csv', header=0)
annot_train_df.columns=['Image_Name','x1','y1','x2','y2','Image_Class']
annot_train_df.head()
Out[ ]:
Image_Name x1 y1 x2 y2 Image_Class
0 00001.jpg 39 116 569 375 14
1 00002.jpg 36 116 868 587 3
2 00003.jpg 85 109 601 381 91
3 00004.jpg 621 393 1484 1096 134
4 00005.jpg 14 36 133 99 106
In [ ]:
annot_train_df.shape
Out[ ]:
(8144, 6)
In [ ]:
annot_test_df = pd.read_csv('Annotations/Test Annotation.csv', header=0)
annot_test_df.columns=['Image_Name','x1','y1','x2','y2','Image_Class']
annot_test_df.head()
Out[ ]:
Image_Name x1 y1 x2 y2 Image_Class
0 00001.jpg 30 52 246 147 181
1 00002.jpg 100 19 576 203 103
2 00003.jpg 51 105 968 659 145
3 00004.jpg 67 84 581 407 187
4 00005.jpg 140 151 593 339 185
In [ ]:
annot_test_df.shape
Out[ ]:
(8041, 6)
In [ ]:
# Train folder
train_dir = 'Car Images/Train Images'
training_images_list = []
for folder in os.listdir(train_dir):
    img_folder = os.path.join(train_dir, folder)
    if not os.path.isdir(img_folder):
        continue
    for img_file in os.listdir(img_folder):
        img_path = os.path.join(img_folder, img_file)
        training_images_list.append([img_file, img_path, folder, annot_train_df[annot_train_df['Image_Name'] == img_file]['Image_Class'].values[0]])

# Creating dataframe
training_car_images_df = pd.DataFrame(training_images_list,
                                      columns=['Image_Name', 'Image_Path', 'Class', 'Image_Class'])
training_car_images_df.head()
Out[ ]:
Image_Name Image_Path Class Image_Class
0 06796.jpg Car Images/Train Images/Dodge Dakota Crew Cab ... Dodge Dakota Crew Cab 2010 90
1 04157.jpg Car Images/Train Images/Dodge Dakota Crew Cab ... Dodge Dakota Crew Cab 2010 90
2 00682.jpg Car Images/Train Images/Dodge Dakota Crew Cab ... Dodge Dakota Crew Cab 2010 90
3 07660.jpg Car Images/Train Images/Dodge Dakota Crew Cab ... Dodge Dakota Crew Cab 2010 90
4 00483.jpg Car Images/Train Images/Dodge Dakota Crew Cab ... Dodge Dakota Crew Cab 2010 90
In [ ]:
training_car_images_df.shape
Out[ ]:
(8144, 4)
In [ ]:
# Test folder
test_dir = 'Car Images/Test Images'
testing_images_list = []
for folder in os.listdir(test_dir):
    img_folder = os.path.join(test_dir, folder)
    if not os.path.isdir(img_folder):
        continue
    for img_file in os.listdir(img_folder):
        img_path = os.path.join(img_folder, img_file)
        testing_images_list.append([img_file, img_path, folder, annot_test_df[annot_test_df['Image_Name'] == img_file]['Image_Class'].values[0]])

# Creating dataframe
testing_car_images_df = pd.DataFrame(testing_images_list,
                                     columns=['Image_Name', 'Image_Path', 'Class', 'Image_Class'])
testing_car_images_df.head()
Out[ ]:
Image_Name Image_Path Class Image_Class
0 00318.jpg Car Images/Test Images/Dodge Dakota Crew Cab 2... Dodge Dakota Crew Cab 2010 90
1 03773.jpg Car Images/Test Images/Dodge Dakota Crew Cab 2... Dodge Dakota Crew Cab 2010 90
2 01206.jpg Car Images/Test Images/Dodge Dakota Crew Cab 2... Dodge Dakota Crew Cab 2010 90
3 08018.jpg Car Images/Test Images/Dodge Dakota Crew Cab 2... Dodge Dakota Crew Cab 2010 90
4 05448.jpg Car Images/Test Images/Dodge Dakota Crew Cab 2... Dodge Dakota Crew Cab 2010 90
In [ ]:
testing_car_images_df.shape
Out[ ]:
(8041, 4)
In [ ]:
annot_train_car_images_df=pd.merge(training_car_images_df, annot_train_df[['Image_Name','x1','y1','x2','y2']], on='Image_Name')
annot_train_car_images_df.head()
Out[ ]:
Image_Name Image_Path Class Image_Class x1 y1 x2 y2
0 06796.jpg Car Images/Train Images/Dodge Dakota Crew Cab ... Dodge Dakota Crew Cab 2010 90 57 88 361 293
1 04157.jpg Car Images/Train Images/Dodge Dakota Crew Cab ... Dodge Dakota Crew Cab 2010 90 10 20 256 171
2 00682.jpg Car Images/Train Images/Dodge Dakota Crew Cab ... Dodge Dakota Crew Cab 2010 90 11 15 297 201
3 07660.jpg Car Images/Train Images/Dodge Dakota Crew Cab ... Dodge Dakota Crew Cab 2010 90 68 122 980 690
4 00483.jpg Car Images/Train Images/Dodge Dakota Crew Cab ... Dodge Dakota Crew Cab 2010 90 18 26 623 342
In [ ]:
annot_train_car_images_df.shape
Out[ ]:
(8144, 8)
In [ ]:
annot_test_car_images_df=pd.merge(testing_car_images_df, annot_test_df[['Image_Name','x1','y1','x2','y2']], on='Image_Name')
annot_test_car_images_df.head()
Out[ ]:
Image_Name Image_Path Class Image_Class x1 y1 x2 y2
0 00318.jpg Car Images/Test Images/Dodge Dakota Crew Cab 2... Dodge Dakota Crew Cab 2010 90 83 55 465 324
1 03773.jpg Car Images/Test Images/Dodge Dakota Crew Cab 2... Dodge Dakota Crew Cab 2010 90 33 43 240 168
2 01206.jpg Car Images/Test Images/Dodge Dakota Crew Cab 2... Dodge Dakota Crew Cab 2010 90 9 60 294 183
3 08018.jpg Car Images/Test Images/Dodge Dakota Crew Cab 2... Dodge Dakota Crew Cab 2010 90 16 185 1011 652
4 05448.jpg Car Images/Test Images/Dodge Dakota Crew Cab 2... Dodge Dakota Crew Cab 2010 90 2 43 278 186
In [ ]:
annot_test_car_images_df.shape
Out[ ]:
(8041, 8)
In [ ]:
# Distribution of class in train dataset
plt.figure(figsize=(70,15))
sns.countplot(data=annot_train_car_images_df,x='Class')
plt.xticks(rotation=90, fontsize=20)
plt.suptitle('Distribution of class in train dataset', fontsize=50,fontweight='bold')
plt.ylabel('Count', fontsize=30,fontweight='bold')
plt.xlabel('Car Names', fontsize=30,fontweight='bold')
plt.show()
In [ ]:
annot_train_car_images_df['Class'].value_counts(ascending=False, normalize=True).mul(100).round(2).astype(str) + '%'
Out[ ]:
GMC Savana Van 2012                                    0.83%
Chrysler 300 SRT-8 2010                                 0.6%
Mercedes-Benz 300-Class Convertible 1993               0.59%
Mitsubishi Lancer Sedan 2012                           0.59%
Chevrolet Corvette ZR1 2012                            0.58%
                                                       ...  
Rolls-Royce Phantom Drophead Coupe Convertible 2012    0.38%
Chevrolet Express Cargo Van 2007                       0.37%
Maybach Landaulet Convertible 2012                     0.36%
FIAT 500 Abarth 2012                                   0.34%
Hyundai Accent Sedan 2012                              0.29%
Name: Class, Length: 196, dtype: object
In [ ]:
# Distribution of class in test dataset
plt.figure(figsize=(70,15))
sns.countplot(data=annot_test_car_images_df,x='Class')
plt.xticks(rotation=90, fontsize=20)
plt.suptitle('Distribution of class in test dataset', fontsize=50,fontweight='bold')
plt.ylabel('Count', fontsize=30,fontweight='bold')
plt.xlabel('Car Names', fontsize=30,fontweight='bold')
plt.show()
In [ ]:
annot_test_car_images_df['Class'].value_counts(ascending=False, normalize=True).mul(100).round(2).astype(str) + '%'
Out[ ]:
GMC Savana Van 2012                                    0.85%
Mercedes-Benz 300-Class Convertible 1993                0.6%
Chrysler 300 SRT-8 2010                                 0.6%
Mitsubishi Lancer Sedan 2012                           0.58%
Audi S6 Sedan 2011                                     0.57%
                                                       ...  
Rolls-Royce Phantom Drophead Coupe Convertible 2012    0.37%
Chevrolet Express Cargo Van 2007                       0.36%
Maybach Landaulet Convertible 2012                     0.36%
FIAT 500 Abarth 2012                                   0.34%
Hyundai Accent Sedan 2012                               0.3%
Name: Class, Length: 196, dtype: object

Data pre-processing

In [ ]:
annot_train_car_images_df_copy=annot_train_car_images_df.copy(deep=True)
annot_train_car_images_df_copy.head()
Out[ ]:
Image_Name Image_Path Class Image_Class x1 y1 x2 y2
0 06796.jpg Car Images/Train Images/Dodge Dakota Crew Cab ... Dodge Dakota Crew Cab 2010 90 57 88 361 293
1 04157.jpg Car Images/Train Images/Dodge Dakota Crew Cab ... Dodge Dakota Crew Cab 2010 90 10 20 256 171
2 00682.jpg Car Images/Train Images/Dodge Dakota Crew Cab ... Dodge Dakota Crew Cab 2010 90 11 15 297 201
3 07660.jpg Car Images/Train Images/Dodge Dakota Crew Cab ... Dodge Dakota Crew Cab 2010 90 68 122 980 690
4 00483.jpg Car Images/Train Images/Dodge Dakota Crew Cab ... Dodge Dakota Crew Cab 2010 90 18 26 623 342
In [ ]:
# Train crop folder
train_dir_crop = 'Car Images/Train Images Cropped'
margin=15
for idx,row in annot_train_car_images_df.iterrows():
    dst_path = os.path.join(train_dir_crop, row['Class'])
    if not os.path.exists(dst_path):
        os.makedirs(dst_path)

    car_img_name=row['Image_Path'].split('/')[-1]
    dst_path = os.path.join(dst_path, car_img_name)

    img = cv2.imread(row['Image_Path'])

    height, width = img.shape[:2]

    x1 = max(0, row['x1'] - margin)
    y1 = max(0, row['y1'] - margin)
    x2 = min(row['x2'] + margin, width)
    y2 = min(row['y2'] + margin, height)

    crop_image = img[y1:y2, x1:x2]
    cv2.imwrite(dst_path, crop_image)

    annot_train_car_images_df.loc[idx,['Image_Path']]=dst_path
In [ ]:
annot_train_car_images_df.head()
Out[ ]:
Image_Name Image_Path Class Image_Class x1 y1 x2 y2
0 06796.jpg Car Images/Train Images Cropped/Dodge Dakota C... Dodge Dakota Crew Cab 2010 90 57 88 361 293
1 04157.jpg Car Images/Train Images Cropped/Dodge Dakota C... Dodge Dakota Crew Cab 2010 90 10 20 256 171
2 00682.jpg Car Images/Train Images Cropped/Dodge Dakota C... Dodge Dakota Crew Cab 2010 90 11 15 297 201
3 07660.jpg Car Images/Train Images Cropped/Dodge Dakota C... Dodge Dakota Crew Cab 2010 90 68 122 980 690
4 00483.jpg Car Images/Train Images Cropped/Dodge Dakota C... Dodge Dakota Crew Cab 2010 90 18 26 623 342
In [ ]:
annot_test_car_images_df_copy=annot_test_car_images_df.copy(deep=True)
annot_test_car_images_df.head()
Out[ ]:
Image_Name Image_Path Class Image_Class x1 y1 x2 y2
0 00318.jpg Car Images/Test Images/Dodge Dakota Crew Cab 2... Dodge Dakota Crew Cab 2010 90 83 55 465 324
1 03773.jpg Car Images/Test Images/Dodge Dakota Crew Cab 2... Dodge Dakota Crew Cab 2010 90 33 43 240 168
2 01206.jpg Car Images/Test Images/Dodge Dakota Crew Cab 2... Dodge Dakota Crew Cab 2010 90 9 60 294 183
3 08018.jpg Car Images/Test Images/Dodge Dakota Crew Cab 2... Dodge Dakota Crew Cab 2010 90 16 185 1011 652
4 05448.jpg Car Images/Test Images/Dodge Dakota Crew Cab 2... Dodge Dakota Crew Cab 2010 90 2 43 278 186
In [ ]:
# Test crop folder
test_dir_crop = 'Car Images/Test Images Cropped'
margin=15
for idx,row in annot_test_car_images_df.iterrows():
    dst_path = os.path.join(test_dir_crop, row['Class'])
    if not os.path.exists(dst_path):
        os.makedirs(dst_path)

    car_img_name=row['Image_Path'].split('/')[-1]
    dst_path = os.path.join(dst_path, car_img_name)

    img = cv2.imread(row['Image_Path'])

    height, width = img.shape[:2]

    x1 = max(0, row['x1'] - margin)
    y1 = max(0, row['y1'] - margin)
    x2 = min(row['x2'] + margin, width)
    y2 = min(row['y2'] + margin, height)

    crop_image = img[y1:y2, x1:x2]
    cv2.imwrite(dst_path, crop_image)

    annot_test_car_images_df.loc[idx,['Image_Path']]=dst_path
In [ ]:
annot_test_car_images_df.head()
Out[ ]:
Image_Name Image_Path Class Image_Class x1 y1 x2 y2
0 00318.jpg Car Images/Test Images Cropped/Dodge Dakota Cr... Dodge Dakota Crew Cab 2010 90 83 55 465 324
1 03773.jpg Car Images/Test Images Cropped/Dodge Dakota Cr... Dodge Dakota Crew Cab 2010 90 33 43 240 168
2 01206.jpg Car Images/Test Images Cropped/Dodge Dakota Cr... Dodge Dakota Crew Cab 2010 90 9 60 294 183
3 08018.jpg Car Images/Test Images Cropped/Dodge Dakota Cr... Dodge Dakota Crew Cab 2010 90 16 185 1011 652
4 05448.jpg Car Images/Test Images Cropped/Dodge Dakota Cr... Dodge Dakota Crew Cab 2010 90 2 43 278 186

Step 4: Display images with bounding box.¶

In [ ]:
test_bounding_df=annot_test_car_images_df_copy.groupby('Image_Class').apply(lambda x:  x.sample(1)).reset_index(drop=True)
test_bounding_df.shape
Out[ ]:
(196, 8)
In [ ]:
click.secho("Test Dataset bounding box".center(65), fg="blue", bold=True, italic=True)
for idx,row in test_bounding_df.sample(n=5,random_state=random_state).iterrows():
    plt.figure(figsize=(8,3))
    plt.suptitle(row['Class'])

    img = cv2.imread(row['Image_Path'])

    plt.subplot(121),plt.imshow(img),plt.axis('off')

    cv2.rectangle(img, (row['x1'], row['y1']), (row['x2'], row['y2']), (255, 165, 0), 4, cv2.LINE_AA)

    plt.subplot(122),plt.imshow(img),plt.axis('off')
                    Test Dataset bounding box                    
In [ ]:
train_bounding_df=annot_train_car_images_df_copy.groupby('Image_Class').apply(lambda x:  x.sample(1)).reset_index(drop=True)
train_bounding_df.shape
Out[ ]:
(196, 8)
In [ ]:
click.secho("Train Dataset bounding box".center(65), fg="blue", bold=True, italic=True)
for idx,row in train_bounding_df.sample(n=5,random_state=random_state).iterrows():
    plt.figure(figsize=(8,3))
    plt.suptitle(row['Class'])

    img = cv2.imread(row['Image_Path'])

    plt.subplot(121),plt.imshow(img),plt.axis('off')

    cv2.rectangle(img, (row['x1'], row['y1']), (row['x2'], row['y2']), (255, 165, 0), 4, cv2.LINE_AA)

    plt.subplot(122),plt.imshow(img),plt.axis('off')
                    Train Dataset bounding box                   

Step 5: Design, train and test basic CNN models to classify the car.¶

In [ ]:
def convert(img_name, car_image_list):
    # Converting image to array
    cv_img=cv2.imread(img_name)

    # Unifying all images to same height and width and then normalizing pixels
    car_image_list.append(cv2.resize(cv_img, (224,224), interpolation = cv2.INTER_AREA).astype('float32')/255.0)
In [ ]:
def visualkeras_view(model):
    color_map = defaultdict(dict)

    color_map[Conv2D]['fill'] = '#00f5d4'
    color_map[MaxPooling2D]['fill'] = '#8338ec'
    color_map[Dropout]['fill'] = '#03045e'
    color_map[Dense]['fill'] = '#fb5607'
    color_map[Flatten]['fill'] = '#ffbe0b'

    return visualkeras.layered_view(model,legend=True,color_map=color_map,max_xy=500, min_xy=10, min_z=10,spacing=20,max_z=500, scale_xy=0.5)
In [ ]:
def create_callbacks(model):
    checkpoint=ModelCheckpoint(model+"_weights.h5",monitor='val_accuracy',
                            save_weights_only=True, mode='max',verbose=1)
    early_stop = EarlyStopping(patience=8, min_delta=1e-4, monitor='val_loss')
    reduce_lr=ReduceLROnPlateau(monitor='val_loss',factor=0.1,patience=4,min_lr=1e-4)

    return [checkpoint,reduce_lr,early_stop]
In [ ]:
def metric_score(model_history, model, batch_size, model_name, model_lists, is_image_augmented=False, train_set=None, test_set=None):

    # Capturing learning history per epoch
    model_hist  = pd.DataFrame(model_history.history)
    model_hist['epoch'] = model_history.epoch

    # Plotting Loss at different epochs
    plt.title('Training Loss vs Validation Loss',fontsize=15,color="green")
    plt.plot(model_hist['loss'])
    plt.plot(model_hist['val_loss'])
    plt.ylabel('Loss')
    plt.xlabel('Epoch')
    plt.legend(("training" , "validation") , loc ='best')
    plt.show()

    # Plotting Accuracy at different epochs
    plt.title('Training Accuracy vs Validation Accuracy',fontsize=15,color="green")
    plt.plot(model_hist['accuracy'])
    plt.plot(model_hist['val_accuracy'])
    plt.ylabel('accuracy')
    plt.xlabel('Epoch')
    plt.legend(("training" , "validation") , loc ='best')
    plt.show()

    # calculate score of training data
    train_score = model.evaluate_generator(train_set, verbose=1) if is_image_augmented else model.evaluate(X_train, y_train, batch_size=batch_size, verbose=1)

    # calculate score of testing data
    test_score = model.evaluate_generator(test_set, verbose=1) if is_image_augmented else model.evaluate(X_test, y_test, batch_size=batch_size, verbose=1)

    # Predicting for X_test
    y_pred=np.argmax(model.predict_generator(test_generator, verbose=1) if is_image_augmented else model.predict_g(X_test, verbose=1), axis = 1)

    if not is_image_augmented:
        y_check_test = np.argmax(y_test, axis = 1)

    print("\nClassification Matrix:\n",classification_report(test_generator.classes, y_pred) if is_image_augmented else classification_report(y_check_test,y_pred))

    plt.figure(figsize = (30, 30))
    sns.heatmap(confusion_matrix(test_generator.classes, y_pred) if is_image_augmented else confusion_matrix(y_check_test,y_pred), annot = True,  fmt = '.1f')
    plt.ylabel('Actual')
    plt.xlabel('Predicted')
    plt.show()

    if is_image_augmented:
        model_lists.append([model_name, train_score[0], test_score[0], train_score[1], test_score[1], precision_score(test_generator.classes,y_pred, average='micro'), recall_score(test_generator.classes,y_pred, average='micro'), f1_score(test_generator.classes,y_pred, average='micro')])
    else:
        model_lists.append([model_name, train_score[0], test_score[0], train_score[1], test_score[1], precision_score(y_check_test,y_pred, average='micro'), recall_score(y_check_test,y_pred, average='micro'), f1_score(y_check_test,y_pred, average='micro')])

Creating X,y for train dataset¶

In [ ]:
# Formatting i/ps for X and normalizing pixels
annot_train_car_images_list=[]

annot_train_car_images_df['Image_Path'].apply(lambda x: convert(x,annot_train_car_images_list))
X_train=np.array(annot_train_car_images_list)
In [ ]:
# Encoding labels of the images
y_train=annot_train_car_images_df['Image_Class']
y_train=le.fit_transform(y_train)
y_train=np_utils.to_categorical(y_train)
In [ ]:
# X_train and y_train dataframe has been created for further analysis
X_train.shape, y_train.shape
Out[ ]:
((8144, 224, 224, 3), (8144, 196))

Creating X,y for test dataset¶

In [ ]:
# Formatting i/ps for X and normalizing pixels
annot_test_car_images_list=[]

annot_test_car_images_df['Image_Path'].apply(lambda x: convert(x,annot_test_car_images_list))
X_test=np.array(annot_test_car_images_list)
In [ ]:
# Encoding labels of the images
y_test=annot_test_car_images_df['Image_Class']
y_test=le.fit_transform(y_test)
y_test=np_utils.to_categorical(y_test)
In [ ]:
# X_test and y_test dataframe has been created for further analysis
X_test.shape, y_test.shape
Out[ ]:
((8041, 224, 224, 3), (8041, 196))
In [ ]:
# create an iterator object with write permission - base_model_1.pkl
with open('X_train.pkl', 'wb') as files:
    pickle.dump(X_train, files)
with open('y_train.pkl', 'wb') as files:
    pickle.dump(y_train, files)
with open('X_test.pkl', 'wb') as files:
    pickle.dump(X_test, files)
with open('y_test.pkl', 'wb') as files:
    pickle.dump(y_test, files)
In [ ]:
with open('X_train.pkl' , 'rb') as f:
    X_train = pickle.load(f)
with open('y_train.pkl' , 'rb') as f:
    y_train = pickle.load(f)
with open('X_test.pkl' , 'rb') as f:
    X_test = pickle.load(f)
with open('y_test.pkl' , 'rb') as f:
    y_test = pickle.load(f)

Model training¶

Initializing basic CNN with Architecture as : 3 Conv + 2 Avg pooling + 1 FC layer + 1 FC-softmax layer¶

In [ ]:
backend.clear_session()
tf.random.set_seed(random_state)

# Initialising CNN classifier
base_model_1=Sequential()

# Add a Convolution layer with 6 kernels of 5X5 shape with activation function ReLU
base_model_1.add(Conv2D(filters=6,kernel_size=(5,5),input_shape=(224,224,3),activation='relu'))
base_model_1.add(AveragePooling2D())

# Add another Convolution layer with 16 kernels of 5X5 shape with activation function ReLU
base_model_1.add(Conv2D(filters=16,kernel_size=(5,5),activation='relu'))
base_model_1.add(AveragePooling2D())

# Add another Convolution layer with 120 kernels of 5X5 shape with activation function ReLU
base_model_1.add(Conv2D(filters=120,kernel_size=(5,5),activation='relu'))

# Flattening the layer before fully connected layers
base_model_1.add(Flatten())

# Adding a fully connected layer with 512 neurons
base_model_1.add(Dense(units=84,activation='relu'))

# The final output layer with 5 neuron to predict the categorical classification
base_model_1.add(Dense(units=196,activation='softmax'))

adam = Adam(learning_rate=0.001, beta_1=0.9, beta_2=0.999, epsilon=None, decay=0.001, amsgrad=False)
base_model_1.compile(optimizer = adam, loss = categorical_crossentropy, metrics = ['accuracy'])
2023-06-02 00:55:43.767961: I tensorflow/core/platform/cpu_feature_guard.cc:193] This TensorFlow binary is optimized with oneAPI Deep Neural Network Library (oneDNN) to use the following CPU instructions in performance-critical operations:  AVX2 FMA
To enable them in other operations, rebuild TensorFlow with the appropriate compiler flags.
In [ ]:
## Looking into our base model
base_model_1.summary()
Model: "sequential"
_________________________________________________________________
 Layer (type)                Output Shape              Param #   
=================================================================
 conv2d (Conv2D)             (None, 220, 220, 6)       456       
                                                                 
 average_pooling2d (AverageP  (None, 110, 110, 6)      0         
 ooling2D)                                                       
                                                                 
 conv2d_1 (Conv2D)           (None, 106, 106, 16)      2416      
                                                                 
 average_pooling2d_1 (Averag  (None, 53, 53, 16)       0         
 ePooling2D)                                                     
                                                                 
 conv2d_2 (Conv2D)           (None, 49, 49, 120)       48120     
                                                                 
 flatten (Flatten)           (None, 288120)            0         
                                                                 
 dense (Dense)               (None, 84)                24202164  
                                                                 
 dense_1 (Dense)             (None, 196)               16660     
                                                                 
=================================================================
Total params: 24,269,816
Trainable params: 24,269,816
Non-trainable params: 0
_________________________________________________________________
In [ ]:
visualkeras_view(base_model_1)
Out[ ]:
In [ ]:
# Fit the model
base_model_1_history=base_model_1.fit(X_train, y_train, validation_data=(X_test, y_test) , epochs=20, batch_size=50, verbose=1, callbacks=create_callbacks('base_model_1'))
Epoch 1/20
163/163 [==============================] - ETA: 0s - loss: 5.3743 - accuracy: 0.0079
Epoch 1: saving model to base_model_1_weights.h5
163/163 [==============================] - 309s 2s/step - loss: 5.3743 - accuracy: 0.0079 - val_loss: 5.2772 - val_accuracy: 0.0085 - lr: 0.0010
Epoch 2/20
163/163 [==============================] - ETA: 0s - loss: 5.2769 - accuracy: 0.0080
Epoch 2: saving model to base_model_1_weights.h5
163/163 [==============================] - 275s 2s/step - loss: 5.2769 - accuracy: 0.0080 - val_loss: 5.2700 - val_accuracy: 0.0078 - lr: 0.0010
Epoch 3/20
163/163 [==============================] - ETA: 0s - loss: 5.1768 - accuracy: 0.0198
Epoch 3: saving model to base_model_1_weights.h5
163/163 [==============================] - 228s 1s/step - loss: 5.1768 - accuracy: 0.0198 - val_loss: 5.1590 - val_accuracy: 0.0178 - lr: 0.0010
Epoch 4/20
163/163 [==============================] - ETA: 0s - loss: 4.4569 - accuracy: 0.0977
Epoch 4: saving model to base_model_1_weights.h5
163/163 [==============================] - 199s 1s/step - loss: 4.4569 - accuracy: 0.0977 - val_loss: 5.2444 - val_accuracy: 0.0238 - lr: 0.0010
Epoch 5/20
163/163 [==============================] - ETA: 0s - loss: 2.8699 - accuracy: 0.3636
Epoch 5: saving model to base_model_1_weights.h5
163/163 [==============================] - 203s 1s/step - loss: 2.8699 - accuracy: 0.3636 - val_loss: 6.4612 - val_accuracy: 0.0279 - lr: 0.0010
Epoch 6/20
163/163 [==============================] - ETA: 0s - loss: 1.1454 - accuracy: 0.7526
Epoch 6: saving model to base_model_1_weights.h5
163/163 [==============================] - 201s 1s/step - loss: 1.1454 - accuracy: 0.7526 - val_loss: 8.4853 - val_accuracy: 0.0279 - lr: 1.0000e-04
Epoch 7/20
163/163 [==============================] - ETA: 0s - loss: 0.8909 - accuracy: 0.7970
Epoch 7: saving model to base_model_1_weights.h5
163/163 [==============================] - 201s 1s/step - loss: 0.8909 - accuracy: 0.7970 - val_loss: 9.3918 - val_accuracy: 0.0267 - lr: 1.0000e-04
Epoch 8/20
163/163 [==============================] - ETA: 0s - loss: 0.7512 - accuracy: 0.8321
Epoch 8: saving model to base_model_1_weights.h5
163/163 [==============================] - 202s 1s/step - loss: 0.7512 - accuracy: 0.8321 - val_loss: 9.5807 - val_accuracy: 0.0267 - lr: 1.0000e-05
Epoch 9/20
163/163 [==============================] - ETA: 0s - loss: 0.7369 - accuracy: 0.8333
Epoch 9: saving model to base_model_1_weights.h5
163/163 [==============================] - 203s 1s/step - loss: 0.7369 - accuracy: 0.8333 - val_loss: 9.7105 - val_accuracy: 0.0270 - lr: 1.0000e-05
Epoch 10/20
163/163 [==============================] - ETA: 0s - loss: 0.7246 - accuracy: 0.8357
Epoch 10: saving model to base_model_1_weights.h5
163/163 [==============================] - 203s 1s/step - loss: 0.7246 - accuracy: 0.8357 - val_loss: 9.8007 - val_accuracy: 0.0270 - lr: 1.0000e-05
Epoch 11/20
163/163 [==============================] - ETA: 0s - loss: 0.7136 - accuracy: 0.8380
Epoch 11: saving model to base_model_1_weights.h5
163/163 [==============================] - 202s 1s/step - loss: 0.7136 - accuracy: 0.8380 - val_loss: 9.8769 - val_accuracy: 0.0271 - lr: 1.0000e-05
In [ ]:
# create an iterator object with write permission - base_model_1.pkl
with open('base_model_1.pkl', 'wb') as files:
    pickle.dump(base_model_1, files)
Keras weights file (<HDF5 file "variables.h5" (mode r+)>) saving:
...layers
......average_pooling2d
.........vars
......average_pooling2d_1
.........vars
......conv2d
.........vars
............0
............1
......conv2d_1
.........vars
............0
............1
......conv2d_2
.........vars
............0
............1
......dense
.........vars
............0
............1
......dense_1
.........vars
............0
............1
......flatten
.........vars
...metrics
......mean
.........vars
............0
............1
......mean_metric_wrapper
.........vars
............0
............1
...vars
Keras model archive saving:
File Name                                             Modified             Size
config.json                                    2023-06-02 01:36:24         4029
metadata.json                                  2023-06-02 01:36:24           64
variables.h5                                   2023-06-02 01:36:25     97107328
In [ ]:
metric_score(base_model_1_history,base_model_1,50,'base_model_1',model_list)
163/163 [==============================] - 38s 226ms/step - loss: 0.7045 - accuracy: 0.8409
161/161 [==============================] - 40s 245ms/step - loss: 9.8769 - accuracy: 0.0271
252/252 [==============================] - 41s 159ms/step

Classification Matrix:
               precision    recall  f1-score   support

           0       0.03      0.05      0.04        44
           1       0.00      0.00      0.00        32
           2       0.00      0.00      0.00        43
           3       0.00      0.00      0.00        42
           4       0.00      0.00      0.00        40
           5       0.04      0.05      0.04        44
           6       0.04      0.03      0.03        39
           7       0.00      0.00      0.00        45
           8       0.00      0.00      0.00        41
           9       0.00      0.00      0.00        33
          10       0.00      0.00      0.00        38
          11       0.00      0.00      0.00        36
          12       0.03      0.05      0.04        41
          13       0.02      0.02      0.02        42
          14       0.00      0.00      0.00        43
          15       0.04      0.02      0.03        43
          16       0.03      0.03      0.03        40
          17       0.00      0.00      0.00        42
          18       0.00      0.00      0.00        40
          19       0.06      0.07      0.06        46
          20       0.00      0.00      0.00        42
          21       0.00      0.00      0.00        42
          22       0.03      0.03      0.03        39
          23       0.00      0.00      0.00        45
          24       0.04      0.03      0.03        39
          25       0.08      0.09      0.08        34
          26       0.00      0.00      0.00        35
          27       0.00      0.00      0.00        41
          28       0.00      0.00      0.00        42
          29       0.06      0.10      0.08        41
          30       0.00      0.00      0.00        44
          31       0.04      0.02      0.03        41
          32       0.00      0.00      0.00        42
          33       0.00      0.00      0.00        44
          34       0.00      0.00      0.00        41
          35       0.05      0.07      0.06        41
          36       0.00      0.00      0.00        38
          37       0.03      0.03      0.03        40
          38       0.02      0.03      0.02        36
          39       0.00      0.00      0.00        39
          40       0.02      0.03      0.02        35
          41       0.04      0.03      0.03        34
          42       0.02      0.02      0.02        46
          43       0.00      0.00      0.00        44
          44       0.00      0.00      0.00        32
          45       0.00      0.00      0.00        43
          46       0.00      0.00      0.00        35
          47       0.04      0.05      0.05        42
          48       0.04      0.03      0.03        37
          49       0.00      0.00      0.00        42
          50       0.00      0.00      0.00        43
          51       0.00      0.00      0.00        41
          52       0.04      0.05      0.04        44
          53       0.00      0.00      0.00        40
          54       0.02      0.03      0.02        39
          55       0.02      0.02      0.02        46
          56       0.05      0.05      0.05        37
          57       0.02      0.02      0.02        44
          58       0.02      0.02      0.02        44
          59       0.06      0.06      0.06        36
          60       0.03      0.02      0.03        43
          61       0.00      0.00      0.00        37
          62       0.02      0.02      0.02        44
          63       0.04      0.03      0.04        29
          64       0.00      0.00      0.00        45
          65       0.03      0.02      0.03        41
          66       0.00      0.00      0.00        38
          67       0.03      0.03      0.03        40
          68       0.03      0.03      0.03        38
          69       0.04      0.05      0.04        42
          70       0.03      0.03      0.03        35
          71       0.03      0.04      0.04        45
          72       0.00      0.00      0.00        44
          73       0.02      0.02      0.02        43
          74       0.00      0.00      0.00        44
          75       0.00      0.00      0.00        43
          76       0.02      0.03      0.02        40
          77       0.00      0.00      0.00        37
          78       0.02      0.02      0.02        48
          79       0.00      0.00      0.00        43
          80       0.02      0.02      0.02        45
          81       0.03      0.02      0.03        45
          82       0.05      0.05      0.05        40
          83       0.08      0.07      0.07        42
          84       0.07      0.09      0.08        43
          85       0.10      0.10      0.10        42
          86       0.04      0.05      0.04        44
          87       0.11      0.15      0.13        39
          88       0.02      0.02      0.02        44
          89       0.05      0.05      0.05        41
          90       0.02      0.03      0.03        38
          91       0.00      0.00      0.00        40
          92       0.12      0.05      0.07        39
          93       0.02      0.02      0.02        43
          94       0.00      0.00      0.00        45
          95       0.00      0.00      0.00        41
          96       0.10      0.10      0.10        42
          97       0.00      0.00      0.00        46
          98       0.08      0.04      0.05        27
          99       0.04      0.03      0.04        33
         100       0.05      0.05      0.05        42
         101       0.02      0.03      0.02        39
         102       0.05      0.05      0.05        39
         103       0.05      0.05      0.05        42
         104       0.07      0.07      0.07        43
         105       0.11      0.07      0.09        41
         106       0.00      0.00      0.00        44
         107       0.05      0.05      0.05        44
         108       0.00      0.00      0.00        44
         109       0.02      0.02      0.02        43
         110       0.00      0.00      0.00        42
         111       0.03      0.02      0.03        45
         112       0.00      0.00      0.00        42
         113       0.02      0.02      0.02        45
         114       0.02      0.02      0.02        45
         115       0.00      0.00      0.00        37
         116       0.00      0.00      0.00        42
         117       0.02      0.02      0.02        41
         118       0.06      0.07      0.07        68
         119       0.08      0.10      0.09        42
         120       0.07      0.07      0.07        44
         121       0.00      0.00      0.00        40
         122       0.02      0.02      0.02        44
         123       0.09      0.08      0.08        39
         124       0.11      0.05      0.07        43
         125       0.02      0.02      0.02        42
         126       0.03      0.02      0.03        41
         127       0.00      0.00      0.00        39
         128       0.04      0.05      0.04        38
         129       0.00      0.00      0.00        41
         130       0.05      0.05      0.05        42
         131       0.00      0.00      0.00        43
         132       0.06      0.02      0.03        42
         133       0.05      0.06      0.05        33
         134       0.03      0.02      0.03        42
         135       0.00      0.00      0.00        24
         136       0.04      0.05      0.04        43
         137       0.04      0.05      0.04        39
         138       0.02      0.02      0.02        42
         139       0.04      0.07      0.06        42
         140       0.00      0.00      0.00        34
         141       0.08      0.06      0.07        32
         142       0.03      0.05      0.04        40
         143       0.02      0.02      0.02        46
         144       0.04      0.05      0.04        44
         145       0.04      0.05      0.05        43
         146       0.05      0.05      0.05        44
         147       0.04      0.07      0.05        45
         148       0.03      0.02      0.03        42
         149       0.03      0.03      0.03        36
         150       0.03      0.02      0.02        43
         151       0.03      0.03      0.03        35
         152       0.00      0.00      0.00        44
         153       0.06      0.05      0.05        42
         154       0.03      0.02      0.03        42
         155       0.05      0.05      0.05        39
         156       0.05      0.06      0.05        36
         157       0.00      0.00      0.00        29
         158       0.00      0.00      0.00        36
         159       0.07      0.07      0.07        44
         160       0.03      0.04      0.03        48
         161       0.00      0.00      0.00        45
         162       0.00      0.00      0.00        36
         163       0.06      0.05      0.05        43
         164       0.00      0.00      0.00        44
         165       0.32      0.27      0.29        41
         166       0.02      0.02      0.02        47
         167       0.04      0.02      0.03        42
         168       0.02      0.03      0.03        38
         169       0.00      0.00      0.00        44
         170       0.02      0.02      0.02        46
         171       0.00      0.00      0.00        44
         172       0.00      0.00      0.00        43
         173       0.05      0.02      0.03        41
         174       0.00      0.00      0.00        30
         175       0.00      0.00      0.00        38
         176       0.04      0.05      0.04        44
         177       0.03      0.02      0.03        41
         178       0.00      0.00      0.00        45
         179       0.04      0.05      0.04        42
         180       0.03      0.03      0.03        38
         181       0.00      0.00      0.00        46
         182       0.05      0.05      0.05        42
         183       0.02      0.03      0.02        40
         184       0.00      0.00      0.00        38
         185       0.04      0.03      0.03        38
         186       0.05      0.05      0.05        43
         187       0.00      0.00      0.00        43
         188       0.00      0.00      0.00        40
         189       0.02      0.02      0.02        43
         190       0.02      0.02      0.02        46
         191       0.02      0.02      0.02        42
         192       0.05      0.02      0.03        41
         193       0.03      0.02      0.03        45
         194       0.02      0.02      0.02        43
         195       0.06      0.05      0.06        40

    accuracy                           0.03      8041
   macro avg       0.03      0.03      0.03      8041
weighted avg       0.03      0.03      0.03      8041

Initializing basic CNN with Architecture as : 5 Conv + 3 Max Pool + 2 FC layers + 1 FC-softmax layer¶

In [ ]:
backend.clear_session()
tf.random.set_seed(random_state)

# Initialising CNN classifier
base_model_2=Sequential()

# Add a Convolution layer with 96 kernels of 11X11 shape with activation function ReLU
base_model_2.add(Conv2D(filters=96,kernel_size=(11,11),strides=(4,4),input_shape=(224,224,3),activation='relu'))
base_model_2.add(BatchNormalization())
base_model_2.add(MaxPool2D(pool_size=(3,3), strides=(2,2)))

# Add another Convolution layer with 256 kernels of 5X5 shape with activation function ReLU
base_model_2.add(Conv2D(filters=256,kernel_size=(5,5),strides=(1,1),activation='relu',padding="same"))
base_model_2.add(BatchNormalization())
base_model_2.add(MaxPool2D(pool_size=(3,3), strides=(2,2)))

# Add another Convolution layer with 384 kernels of 3X3 shape with activation function ReLU
base_model_2.add(Conv2D(filters=384,kernel_size=(3,3),strides=(1,1),activation='relu',padding="same"))
base_model_2.add(BatchNormalization())

# Add another Convolution layer with 384 kernels of 3X3 shape with activation function ReLU
base_model_2.add(Conv2D(filters=384,kernel_size=(3,3),strides=(1,1),activation='relu',padding="same"))
base_model_2.add(BatchNormalization())

# Add another Convolution layer with 256 kernels of 3X3 shape with activation function ReLU
base_model_2.add(Conv2D(filters=256,kernel_size=(3,3),strides=(1,1),activation='relu',padding="same"))
base_model_2.add(BatchNormalization())
base_model_2.add(MaxPool2D(pool_size=(3,3), strides=(2,2)))

# Flattening the layer before fully connected layers
base_model_2.add(Flatten())

# Adding a fully connected layer with 4096 neurons
base_model_2.add(Dense(units=4096,activation='relu'))
base_model_2.add(Dropout(0.5))

# Adding a fully connected layer with 4096 neurons
base_model_2.add(Dense(units=4096,activation='relu'))
base_model_2.add(Dropout(0.5))

# The final output layer with 5 neuron to predict the categorical classification
base_model_2.add(Dense(units=196,activation='softmax'))

adam = Adam(learning_rate=0.001, beta_1=0.9, beta_2=0.999, epsilon=None, decay=0.001, amsgrad=False)
base_model_2.compile(optimizer = adam, loss = categorical_crossentropy, metrics = ['accuracy'])
In [ ]:
## Looking into our base model
base_model_2.summary()
Model: "sequential"
_________________________________________________________________
 Layer (type)                Output Shape              Param #   
=================================================================
 conv2d (Conv2D)             (None, 54, 54, 96)        34944     
                                                                 
 batch_normalization (BatchN  (None, 54, 54, 96)       384       
 ormalization)                                                   
                                                                 
 max_pooling2d (MaxPooling2D  (None, 26, 26, 96)       0         
 )                                                               
                                                                 
 conv2d_1 (Conv2D)           (None, 26, 26, 256)       614656    
                                                                 
 batch_normalization_1 (Batc  (None, 26, 26, 256)      1024      
 hNormalization)                                                 
                                                                 
 max_pooling2d_1 (MaxPooling  (None, 12, 12, 256)      0         
 2D)                                                             
                                                                 
 conv2d_2 (Conv2D)           (None, 12, 12, 384)       885120    
                                                                 
 batch_normalization_2 (Batc  (None, 12, 12, 384)      1536      
 hNormalization)                                                 
                                                                 
 conv2d_3 (Conv2D)           (None, 12, 12, 384)       1327488   
                                                                 
 batch_normalization_3 (Batc  (None, 12, 12, 384)      1536      
 hNormalization)                                                 
                                                                 
 conv2d_4 (Conv2D)           (None, 12, 12, 256)       884992    
                                                                 
 batch_normalization_4 (Batc  (None, 12, 12, 256)      1024      
 hNormalization)                                                 
                                                                 
 max_pooling2d_2 (MaxPooling  (None, 5, 5, 256)        0         
 2D)                                                             
                                                                 
 flatten (Flatten)           (None, 6400)              0         
                                                                 
 dense (Dense)               (None, 4096)              26218496  
                                                                 
 dropout (Dropout)           (None, 4096)              0         
                                                                 
 dense_1 (Dense)             (None, 4096)              16781312  
                                                                 
 dropout_1 (Dropout)         (None, 4096)              0         
                                                                 
 dense_2 (Dense)             (None, 196)               803012    
                                                                 
=================================================================
Total params: 47,555,524
Trainable params: 47,552,772
Non-trainable params: 2,752
_________________________________________________________________
In [ ]:
visualkeras_view(base_model_2)
Out[ ]:
In [ ]:
# Fit the model
base_model_2_history=base_model_2.fit(X_train, y_train, validation_data=(X_test, y_test) , epochs=50, batch_size=50, verbose=1, callbacks=create_callbacks('base_model_2'))
Epoch 1/50
163/163 [==============================] - ETA: 0s - loss: 6.8761 - accuracy: 0.0061
Epoch 1: saving model to base_model_2_weights.h5
163/163 [==============================] - 325s 2s/step - loss: 6.8761 - accuracy: 0.0061 - val_loss: 5.2500 - val_accuracy: 0.0087 - lr: 0.0010
Epoch 2/50
163/163 [==============================] - ETA: 0s - loss: 5.3210 - accuracy: 0.0135
Epoch 2: saving model to base_model_2_weights.h5
163/163 [==============================] - 310s 2s/step - loss: 5.3210 - accuracy: 0.0135 - val_loss: 5.2265 - val_accuracy: 0.0081 - lr: 0.0010
Epoch 3/50
163/163 [==============================] - ETA: 0s - loss: 5.2126 - accuracy: 0.0125
Epoch 3: saving model to base_model_2_weights.h5
163/163 [==============================] - 315s 2s/step - loss: 5.2126 - accuracy: 0.0125 - val_loss: 5.1348 - val_accuracy: 0.0208 - lr: 0.0010
Epoch 4/50
163/163 [==============================] - ETA: 0s - loss: 5.1705 - accuracy: 0.0163
Epoch 4: saving model to base_model_2_weights.h5
163/163 [==============================] - 308s 2s/step - loss: 5.1705 - accuracy: 0.0163 - val_loss: 5.1857 - val_accuracy: 0.0098 - lr: 0.0010
Epoch 5/50
163/163 [==============================] - ETA: 0s - loss: 5.1177 - accuracy: 0.0163
Epoch 5: saving model to base_model_2_weights.h5
163/163 [==============================] - 312s 2s/step - loss: 5.1177 - accuracy: 0.0163 - val_loss: 5.0444 - val_accuracy: 0.0182 - lr: 0.0010
Epoch 6/50
163/163 [==============================] - ETA: 0s - loss: 5.0826 - accuracy: 0.0183
Epoch 6: saving model to base_model_2_weights.h5
163/163 [==============================] - 307s 2s/step - loss: 5.0826 - accuracy: 0.0183 - val_loss: 5.0217 - val_accuracy: 0.0265 - lr: 0.0010
Epoch 7/50
163/163 [==============================] - ETA: 0s - loss: 5.0241 - accuracy: 0.0226
Epoch 7: saving model to base_model_2_weights.h5
163/163 [==============================] - 307s 2s/step - loss: 5.0241 - accuracy: 0.0226 - val_loss: 5.6371 - val_accuracy: 0.0174 - lr: 0.0010
Epoch 8/50
163/163 [==============================] - ETA: 0s - loss: 4.9721 - accuracy: 0.0247
Epoch 8: saving model to base_model_2_weights.h5
163/163 [==============================] - 307s 2s/step - loss: 4.9721 - accuracy: 0.0247 - val_loss: 5.0003 - val_accuracy: 0.0265 - lr: 0.0010
Epoch 9/50
163/163 [==============================] - ETA: 0s - loss: 4.9116 - accuracy: 0.0307
Epoch 9: saving model to base_model_2_weights.h5
163/163 [==============================] - 315s 2s/step - loss: 4.9116 - accuracy: 0.0307 - val_loss: 4.8620 - val_accuracy: 0.0409 - lr: 0.0010
Epoch 10/50
163/163 [==============================] - ETA: 0s - loss: 4.8295 - accuracy: 0.0363
Epoch 10: saving model to base_model_2_weights.h5
163/163 [==============================] - 307s 2s/step - loss: 4.8295 - accuracy: 0.0363 - val_loss: 5.0497 - val_accuracy: 0.0204 - lr: 0.0010
Epoch 11/50
163/163 [==============================] - ETA: 0s - loss: 4.7436 - accuracy: 0.0436
Epoch 11: saving model to base_model_2_weights.h5
163/163 [==============================] - 301s 2s/step - loss: 4.7436 - accuracy: 0.0436 - val_loss: 4.7586 - val_accuracy: 0.0443 - lr: 0.0010
Epoch 12/50
163/163 [==============================] - ETA: 0s - loss: 4.6268 - accuracy: 0.0519
Epoch 12: saving model to base_model_2_weights.h5
163/163 [==============================] - 300s 2s/step - loss: 4.6268 - accuracy: 0.0519 - val_loss: 4.7004 - val_accuracy: 0.0512 - lr: 0.0010
Epoch 13/50
163/163 [==============================] - ETA: 0s - loss: 4.5445 - accuracy: 0.0607
Epoch 13: saving model to base_model_2_weights.h5
163/163 [==============================] - 299s 2s/step - loss: 4.5445 - accuracy: 0.0607 - val_loss: 4.9098 - val_accuracy: 0.0429 - lr: 0.0010
Epoch 14/50
163/163 [==============================] - ETA: 0s - loss: 4.4438 - accuracy: 0.0734
Epoch 14: saving model to base_model_2_weights.h5
163/163 [==============================] - 301s 2s/step - loss: 4.4438 - accuracy: 0.0734 - val_loss: 4.5358 - val_accuracy: 0.0669 - lr: 0.0010
Epoch 15/50
163/163 [==============================] - ETA: 0s - loss: 4.3129 - accuracy: 0.0857
Epoch 15: saving model to base_model_2_weights.h5
163/163 [==============================] - 303s 2s/step - loss: 4.3129 - accuracy: 0.0857 - val_loss: 4.4460 - val_accuracy: 0.0744 - lr: 0.0010
Epoch 16/50
163/163 [==============================] - ETA: 0s - loss: 4.1993 - accuracy: 0.0964
Epoch 16: saving model to base_model_2_weights.h5
163/163 [==============================] - 301s 2s/step - loss: 4.1993 - accuracy: 0.0964 - val_loss: 4.5149 - val_accuracy: 0.0678 - lr: 0.0010
Epoch 17/50
163/163 [==============================] - ETA: 0s - loss: 4.0363 - accuracy: 0.1162
Epoch 17: saving model to base_model_2_weights.h5
163/163 [==============================] - 300s 2s/step - loss: 4.0363 - accuracy: 0.1162 - val_loss: 4.3685 - val_accuracy: 0.0878 - lr: 0.0010
Epoch 18/50
163/163 [==============================] - ETA: 0s - loss: 3.8954 - accuracy: 0.1353
Epoch 18: saving model to base_model_2_weights.h5
163/163 [==============================] - 305s 2s/step - loss: 3.8954 - accuracy: 0.1353 - val_loss: 4.2952 - val_accuracy: 0.0938 - lr: 0.0010
Epoch 19/50
163/163 [==============================] - ETA: 0s - loss: 3.7376 - accuracy: 0.1510
Epoch 19: saving model to base_model_2_weights.h5
163/163 [==============================] - 296s 2s/step - loss: 3.7376 - accuracy: 0.1510 - val_loss: 4.1317 - val_accuracy: 0.1122 - lr: 0.0010
Epoch 20/50
163/163 [==============================] - ETA: 0s - loss: 3.5530 - accuracy: 0.1838
Epoch 20: saving model to base_model_2_weights.h5
163/163 [==============================] - 296s 2s/step - loss: 3.5530 - accuracy: 0.1838 - val_loss: 4.3190 - val_accuracy: 0.0943 - lr: 0.0010
Epoch 21/50
163/163 [==============================] - ETA: 0s - loss: 3.4047 - accuracy: 0.2040
Epoch 21: saving model to base_model_2_weights.h5
163/163 [==============================] - 296s 2s/step - loss: 3.4047 - accuracy: 0.2040 - val_loss: 4.0529 - val_accuracy: 0.1259 - lr: 0.0010
Epoch 22/50
163/163 [==============================] - ETA: 0s - loss: 3.2221 - accuracy: 0.2349
Epoch 22: saving model to base_model_2_weights.h5
163/163 [==============================] - 297s 2s/step - loss: 3.2221 - accuracy: 0.2349 - val_loss: 4.2137 - val_accuracy: 0.1157 - lr: 0.0010
Epoch 23/50
163/163 [==============================] - ETA: 0s - loss: 3.0432 - accuracy: 0.2666
Epoch 23: saving model to base_model_2_weights.h5
163/163 [==============================] - 298s 2s/step - loss: 3.0432 - accuracy: 0.2666 - val_loss: 4.0578 - val_accuracy: 0.1476 - lr: 0.0010
Epoch 24/50
163/163 [==============================] - ETA: 0s - loss: 2.6887 - accuracy: 0.3314
Epoch 24: saving model to base_model_2_weights.h5
163/163 [==============================] - 298s 2s/step - loss: 2.6887 - accuracy: 0.3314 - val_loss: 3.7714 - val_accuracy: 0.1754 - lr: 1.0000e-04
Epoch 25/50
163/163 [==============================] - ETA: 0s - loss: 2.6011 - accuracy: 0.3495
Epoch 25: saving model to base_model_2_weights.h5
163/163 [==============================] - 296s 2s/step - loss: 2.6011 - accuracy: 0.3495 - val_loss: 3.7501 - val_accuracy: 0.1771 - lr: 1.0000e-04
Epoch 26/50
163/163 [==============================] - ETA: 0s - loss: 2.5033 - accuracy: 0.3669
Epoch 26: saving model to base_model_2_weights.h5
163/163 [==============================] - 297s 2s/step - loss: 2.5033 - accuracy: 0.3669 - val_loss: 3.7328 - val_accuracy: 0.1831 - lr: 1.0000e-04
Epoch 27/50
163/163 [==============================] - ETA: 0s - loss: 2.4395 - accuracy: 0.3810
Epoch 27: saving model to base_model_2_weights.h5
163/163 [==============================] - 302s 2s/step - loss: 2.4395 - accuracy: 0.3810 - val_loss: 3.7319 - val_accuracy: 0.1817 - lr: 1.0000e-04
Epoch 28/50
163/163 [==============================] - ETA: 0s - loss: 2.4142 - accuracy: 0.3881
Epoch 28: saving model to base_model_2_weights.h5
163/163 [==============================] - 299s 2s/step - loss: 2.4142 - accuracy: 0.3881 - val_loss: 3.7204 - val_accuracy: 0.1844 - lr: 1.0000e-04
Epoch 29/50
163/163 [==============================] - ETA: 0s - loss: 2.3735 - accuracy: 0.3934
Epoch 29: saving model to base_model_2_weights.h5
163/163 [==============================] - 296s 2s/step - loss: 2.3735 - accuracy: 0.3934 - val_loss: 3.6943 - val_accuracy: 0.1911 - lr: 1.0000e-04
Epoch 30/50
163/163 [==============================] - ETA: 0s - loss: 2.3222 - accuracy: 0.4110
Epoch 30: saving model to base_model_2_weights.h5
163/163 [==============================] - 299s 2s/step - loss: 2.3222 - accuracy: 0.4110 - val_loss: 3.7075 - val_accuracy: 0.1940 - lr: 1.0000e-04
Epoch 31/50
163/163 [==============================] - ETA: 0s - loss: 2.2952 - accuracy: 0.4085
Epoch 31: saving model to base_model_2_weights.h5
163/163 [==============================] - 296s 2s/step - loss: 2.2952 - accuracy: 0.4085 - val_loss: 3.7056 - val_accuracy: 0.1926 - lr: 1.0000e-04
Epoch 32/50
163/163 [==============================] - ETA: 0s - loss: 2.2615 - accuracy: 0.4213
Epoch 32: saving model to base_model_2_weights.h5
163/163 [==============================] - 296s 2s/step - loss: 2.2615 - accuracy: 0.4213 - val_loss: 3.7025 - val_accuracy: 0.1952 - lr: 1.0000e-05
Epoch 33/50
163/163 [==============================] - ETA: 0s - loss: 2.2638 - accuracy: 0.4158
Epoch 33: saving model to base_model_2_weights.h5
163/163 [==============================] - 296s 2s/step - loss: 2.2638 - accuracy: 0.4158 - val_loss: 3.6987 - val_accuracy: 0.1969 - lr: 1.0000e-05
Epoch 34/50
163/163 [==============================] - ETA: 0s - loss: 2.2551 - accuracy: 0.4206
Epoch 34: saving model to base_model_2_weights.h5
163/163 [==============================] - 298s 2s/step - loss: 2.2551 - accuracy: 0.4206 - val_loss: 3.6958 - val_accuracy: 0.1971 - lr: 1.0000e-05
Epoch 35/50
163/163 [==============================] - ETA: 0s - loss: 2.2224 - accuracy: 0.4235
Epoch 35: saving model to base_model_2_weights.h5
163/163 [==============================] - 298s 2s/step - loss: 2.2224 - accuracy: 0.4235 - val_loss: 3.6988 - val_accuracy: 0.1955 - lr: 1.0000e-05
Epoch 36/50
163/163 [==============================] - ETA: 0s - loss: 2.2309 - accuracy: 0.4289
Epoch 36: saving model to base_model_2_weights.h5
163/163 [==============================] - 298s 2s/step - loss: 2.2309 - accuracy: 0.4289 - val_loss: 3.6987 - val_accuracy: 0.1962 - lr: 1.0000e-05
Epoch 37/50
163/163 [==============================] - ETA: 0s - loss: 2.2266 - accuracy: 0.4269
Epoch 37: saving model to base_model_2_weights.h5
163/163 [==============================] - 299s 2s/step - loss: 2.2266 - accuracy: 0.4269 - val_loss: 3.6971 - val_accuracy: 0.1970 - lr: 1.0000e-05
In [ ]:
# create an iterator object with write permission - base_model_2.pkl
with open('base_model_2.pkl', 'wb') as files:
    pickle.dump(base_model_2, files)
Keras weights file (<HDF5 file "variables.h5" (mode r+)>) saving:
...layers
......batch_normalization
.........vars
............0
............1
............2
............3
......batch_normalization_1
.........vars
............0
............1
............2
............3
......batch_normalization_2
.........vars
............0
............1
............2
............3
......batch_normalization_3
.........vars
............0
............1
............2
............3
......batch_normalization_4
.........vars
............0
............1
............2
............3
......conv2d
.........vars
............0
............1
......conv2d_1
.........vars
............0
............1
......conv2d_2
.........vars
............0
............1
......conv2d_3
.........vars
............0
............1
......conv2d_4
.........vars
............0
............1
......dense
.........vars
............0
............1
......dense_1
.........vars
............0
............1
......dense_2
.........vars
............0
............1
......dropout
.........vars
......dropout_1
.........vars
......flatten
.........vars
......max_pooling2d
.........vars
......max_pooling2d_1
.........vars
......max_pooling2d_2
.........vars
...metrics
......mean
.........vars
............0
............1
......mean_metric_wrapper
.........vars
............0
............1
...vars
Keras model archive saving:
File Name                                             Modified             Size
config.json                                    2023-06-02 04:47:42         8850
metadata.json                                  2023-06-02 04:47:42           64
variables.h5                                   2023-06-02 04:47:42    190281744
In [ ]:
metric_score(base_model_2_history,base_model_2,50,'base_model_2',model_list)
163/163 [==============================] - 57s 344ms/step - loss: 1.6347 - accuracy: 0.6720
161/161 [==============================] - 59s 359ms/step - loss: 3.6971 - accuracy: 0.1970
252/252 [==============================] - 62s 243ms/step

Classification Matrix:
               precision    recall  f1-score   support

           0       0.41      0.66      0.50        44
           1       0.00      0.00      0.00        32
           2       0.10      0.14      0.11        43
           3       0.04      0.05      0.05        42
           4       0.08      0.07      0.08        40
           5       0.30      0.36      0.33        44
           6       0.28      0.28      0.28        39
           7       0.06      0.04      0.05        45
           8       0.18      0.17      0.18        41
           9       0.09      0.12      0.10        33
          10       0.38      0.74      0.50        38
          11       0.23      0.19      0.21        36
          12       0.10      0.07      0.09        41
          13       0.05      0.02      0.03        42
          14       0.17      0.09      0.12        43
          15       0.10      0.14      0.12        43
          16       0.24      0.23      0.23        40
          17       0.33      0.24      0.28        42
          18       0.11      0.12      0.12        40
          19       0.07      0.07      0.07        46
          20       0.14      0.17      0.15        42
          21       0.05      0.05      0.05        42
          22       0.20      0.13      0.16        39
          23       0.17      0.07      0.10        45
          24       0.26      0.44      0.32        39
          25       0.15      0.26      0.19        34
          26       0.00      0.00      0.00        35
          27       0.12      0.10      0.11        41
          28       0.16      0.14      0.15        42
          29       0.21      0.17      0.19        41
          30       0.10      0.09      0.10        44
          31       0.16      0.20      0.18        41
          32       0.19      0.19      0.19        42
          33       0.13      0.18      0.15        44
          34       0.18      0.07      0.10        41
          35       0.14      0.07      0.10        41
          36       0.25      0.26      0.26        38
          37       0.18      0.23      0.20        40
          38       0.53      0.25      0.34        36
          39       0.08      0.18      0.11        39
          40       0.31      0.31      0.31        35
          41       0.18      0.26      0.21        34
          42       0.09      0.11      0.10        46
          43       0.04      0.05      0.04        44
          44       0.33      0.56      0.41        32
          45       0.18      0.40      0.25        43
          46       0.04      0.03      0.03        35
          47       0.14      0.14      0.14        42
          48       0.14      0.19      0.16        37
          49       0.04      0.02      0.03        42
          50       0.02      0.02      0.02        43
          51       0.12      0.12      0.12        41
          52       0.21      0.11      0.15        44
          53       0.16      0.17      0.17        40
          54       0.24      0.13      0.17        39
          55       0.07      0.04      0.05        46
          56       0.30      0.46      0.36        37
          57       0.08      0.11      0.09        44
          58       0.17      0.11      0.14        44
          59       0.31      0.31      0.31        36
          60       0.06      0.02      0.03        43
          61       0.15      0.22      0.17        37
          62       0.19      0.09      0.12        44
          63       0.30      0.31      0.31        29
          64       0.12      0.13      0.12        45
          65       0.24      0.20      0.21        41
          66       0.23      0.08      0.12        38
          67       0.17      0.23      0.20        40
          68       0.29      0.24      0.26        38
          69       0.10      0.19      0.13        42
          70       0.13      0.17      0.15        35
          71       0.15      0.09      0.11        45
          72       0.10      0.11      0.10        44
          73       0.11      0.12      0.11        43
          74       0.18      0.23      0.20        44
          75       0.13      0.16      0.15        43
          76       0.15      0.10      0.12        40
          77       0.15      0.30      0.20        37
          78       0.21      0.12      0.16        48
          79       0.19      0.14      0.16        43
          80       0.21      0.07      0.10        45
          81       0.13      0.18      0.15        45
          82       0.00      0.00      0.00        40
          83       0.27      0.50      0.35        42
          84       0.27      0.56      0.36        43
          85       0.35      0.57      0.44        42
          86       0.15      0.16      0.16        44
          87       0.46      0.56      0.51        39
          88       0.15      0.18      0.16        44
          89       0.15      0.15      0.15        41
          90       0.19      0.26      0.22        38
          91       0.29      0.10      0.15        40
          92       0.44      0.21      0.28        39
          93       0.17      0.09      0.12        43
          94       0.08      0.07      0.07        45
          95       0.12      0.02      0.04        41
          96       0.20      0.14      0.17        42
          97       0.08      0.09      0.08        46
          98       0.30      0.52      0.38        27
          99       0.39      0.52      0.44        33
         100       0.24      0.29      0.26        42
         101       0.32      0.41      0.36        39
         102       0.32      0.31      0.32        39
         103       0.17      0.36      0.23        42
         104       0.17      0.21      0.19        43
         105       0.30      0.54      0.38        41
         106       0.43      0.14      0.21        44
         107       0.16      0.20      0.18        44
         108       0.50      0.48      0.49        44
         109       0.30      0.23      0.26        43
         110       0.14      0.05      0.07        42
         111       0.17      0.18      0.17        45
         112       0.19      0.17      0.18        42
         113       0.12      0.11      0.11        45
         114       0.09      0.09      0.09        45
         115       0.35      0.46      0.40        37
         116       0.13      0.05      0.07        42
         117       0.27      0.15      0.19        41
         118       0.45      0.44      0.44        68
         119       0.30      0.24      0.27        42
         120       0.21      0.14      0.17        44
         121       0.15      0.12      0.14        40
         122       0.40      0.48      0.43        44
         123       0.17      0.10      0.13        39
         124       0.35      0.33      0.34        43
         125       0.14      0.24      0.18        42
         126       0.16      0.44      0.23        41
         127       0.14      0.10      0.12        39
         128       0.11      0.05      0.07        38
         129       0.10      0.05      0.06        41
         130       0.27      0.26      0.27        42
         131       0.00      0.00      0.00        43
         132       0.12      0.05      0.07        42
         133       0.24      0.15      0.19        33
         134       0.15      0.19      0.17        42
         135       0.05      0.04      0.04        24
         136       0.15      0.16      0.15        43
         137       0.17      0.31      0.22        39
         138       0.17      0.02      0.04        42
         139       0.15      0.26      0.19        42
         140       0.00      0.00      0.00        34
         141       0.29      0.12      0.17        32
         142       0.29      0.25      0.27        40
         143       0.14      0.13      0.13        46
         144       0.20      0.25      0.22        44
         145       0.31      0.53      0.39        43
         146       0.05      0.07      0.06        44
         147       0.05      0.04      0.05        45
         148       0.32      0.29      0.30        42
         149       0.39      0.47      0.42        36
         150       0.45      0.42      0.43        43
         151       0.67      0.80      0.73        35
         152       0.40      0.75      0.52        44
         153       0.29      0.17      0.21        42
         154       0.25      0.07      0.11        42
         155       0.14      0.15      0.15        39
         156       0.48      0.61      0.54        36
         157       0.16      0.28      0.20        29
         158       0.09      0.08      0.09        36
         159       0.45      0.57      0.51        44
         160       0.25      0.08      0.12        48
         161       0.44      0.38      0.40        45
         162       0.23      0.19      0.21        36
         163       0.19      0.16      0.18        43
         164       0.12      0.07      0.09        44
         165       0.55      0.51      0.53        41
         166       0.06      0.06      0.06        47
         167       0.31      0.48      0.37        42
         168       0.18      0.16      0.17        38
         169       0.16      0.11      0.13        44
         170       0.09      0.07      0.07        46
         171       0.20      0.25      0.22        44
         172       0.08      0.09      0.09        43
         173       0.31      0.27      0.29        41
         174       0.12      0.13      0.13        30
         175       0.07      0.08      0.07        38
         176       0.23      0.25      0.24        44
         177       0.09      0.10      0.09        41
         178       0.11      0.11      0.11        45
         179       0.23      0.19      0.21        42
         180       0.12      0.29      0.17        38
         181       0.05      0.02      0.03        46
         182       0.12      0.10      0.11        42
         183       0.12      0.03      0.04        40
         184       0.05      0.08      0.06        38
         185       0.19      0.16      0.17        38
         186       0.14      0.12      0.12        43
         187       0.05      0.05      0.05        43
         188       0.17      0.20      0.18        40
         189       0.22      0.05      0.08        43
         190       0.17      0.13      0.15        46
         191       0.11      0.05      0.07        42
         192       0.10      0.07      0.08        41
         193       0.13      0.13      0.13        45
         194       0.05      0.02      0.03        43
         195       0.21      0.25      0.23        40

    accuracy                           0.20      8041
   macro avg       0.19      0.20      0.19      8041
weighted avg       0.19      0.20      0.18      8041

Initializing basic CNN with Architecture as : ResNet50 deep layer¶

In [ ]:
def res_identity(x, filters):

  x_skip = x # this will be used for addition with the residual block
  f1, f2 = filters

  #first block
  x = Conv2D(f1, kernel_size=(1, 1), strides=(1, 1), padding='valid', kernel_regularizer=l2(0.001))(x)
  x = BatchNormalization()(x)
  x = Activation(activations.relu)(x)

  #second block # bottleneck (but size kept same with padding)
  x = Conv2D(f1, kernel_size=(3, 3), strides=(1, 1), padding='same', kernel_regularizer=l2(0.001))(x)
  x = BatchNormalization()(x)
  x = Activation(activations.relu)(x)

  # third block activation used after adding the input
  x = Conv2D(f2, kernel_size=(1, 1), strides=(1, 1), padding='valid', kernel_regularizer=l2(0.001))(x)
  x = BatchNormalization()(x)
  # x = Activation(activations.relu)(x)

  # add the input
  x = Add()([x, x_skip])
  x = Activation(activations.relu)(x)

  return x
In [ ]:
def res_conv(x, s, filters):
  x_skip = x
  f1, f2 = filters

  # first block
  x = Conv2D(f1, kernel_size=(1, 1), strides=(s, s), padding='valid', kernel_regularizer=l2(0.001))(x)
  # when s = 2 then it is like downsizing the feature map
  x = BatchNormalization()(x)
  x = Activation(activations.relu)(x)

  # second block
  x = Conv2D(f1, kernel_size=(3, 3), strides=(1, 1), padding='same', kernel_regularizer=l2(0.001))(x)
  x = BatchNormalization()(x)
  x = Activation(activations.relu)(x)

  #third block
  x = Conv2D(f2, kernel_size=(1, 1), strides=(1, 1), padding='valid', kernel_regularizer=l2(0.001))(x)
  x = BatchNormalization()(x)

  # shortcut
  x_skip = Conv2D(f2, kernel_size=(1, 1), strides=(s, s), padding='valid', kernel_regularizer=l2(0.001))(x_skip)
  x_skip = BatchNormalization()(x_skip)

  # add
  x = Add()([x, x_skip])
  x = Activation(activations.relu)(x)

  return x
In [ ]:
backend.clear_session()
tf.random.set_seed(random_state)

def resnet50():

  input_im = Input(shape=(224, 224, 3)) # cifar 10 images size
  x = ZeroPadding2D(padding=(3, 3))(input_im)

  # 1st stage
  # here we perform maxpooling, see the figure above

  x = Conv2D(64, kernel_size=(7, 7), strides=(2, 2))(x)
  x = BatchNormalization()(x)
  x = Activation(activations.relu)(x)
  x = MaxPooling2D((3, 3), strides=(2, 2))(x)

  #2nd stage
  # frm here on only conv block and identity block, no pooling

  x = res_conv(x, s=1, filters=(64, 256))
  x = res_identity(x, filters=(64, 256))
  x = res_identity(x, filters=(64, 256))

  # 3rd stage

  x = res_conv(x, s=2, filters=(128, 512))
  x = res_identity(x, filters=(128, 512))
  x = res_identity(x, filters=(128, 512))
  x = res_identity(x, filters=(128, 512))

  # 4th stage

  x = res_conv(x, s=2, filters=(256, 1024))
  x = res_identity(x, filters=(256, 1024))
  x = res_identity(x, filters=(256, 1024))
  x = res_identity(x, filters=(256, 1024))
  x = res_identity(x, filters=(256, 1024))
  x = res_identity(x, filters=(256, 1024))

  # 5th stage

  x = res_conv(x, s=2, filters=(512, 2048))
  x = res_identity(x, filters=(512, 2048))
  x = res_identity(x, filters=(512, 2048))

  # ends with average pooling and dense connection

  x = AveragePooling2D((2, 2), padding='same')(x)

  x = Flatten()(x)
  x = Dense(196, activation='softmax', kernel_initializer='he_normal')(x) #multi-class

  # define the model

  model = Model(inputs=input_im, outputs=x)

  return model

base_model_3=resnet50()
adam = Adam(learning_rate=0.001, beta_1=0.9, beta_2=0.999, epsilon=None, decay=0.001, amsgrad=False)
base_model_3.compile(optimizer = adam, loss = categorical_crossentropy, metrics = ['accuracy'])
In [ ]:
## Looking into our base model
base_model_3.summary()
Model: "model"
__________________________________________________________________________________________________
 Layer (type)                   Output Shape         Param #     Connected to                     
==================================================================================================
 input_1 (InputLayer)           [(None, 224, 224, 3  0           []                               
                                )]                                                                
                                                                                                  
 zero_padding2d (ZeroPadding2D)  (None, 230, 230, 3)  0          ['input_1[0][0]']                
                                                                                                  
 conv2d (Conv2D)                (None, 112, 112, 64  9472        ['zero_padding2d[0][0]']         
                                )                                                                 
                                                                                                  
 batch_normalization (BatchNorm  (None, 112, 112, 64  256        ['conv2d[0][0]']                 
 alization)                     )                                                                 
                                                                                                  
 activation (Activation)        (None, 112, 112, 64  0           ['batch_normalization[0][0]']    
                                )                                                                 
                                                                                                  
 max_pooling2d (MaxPooling2D)   (None, 55, 55, 64)   0           ['activation[0][0]']             
                                                                                                  
 conv2d_1 (Conv2D)              (None, 55, 55, 64)   4160        ['max_pooling2d[0][0]']          
                                                                                                  
 batch_normalization_1 (BatchNo  (None, 55, 55, 64)  256         ['conv2d_1[0][0]']               
 rmalization)                                                                                     
                                                                                                  
 activation_1 (Activation)      (None, 55, 55, 64)   0           ['batch_normalization_1[0][0]']  
                                                                                                  
 conv2d_2 (Conv2D)              (None, 55, 55, 64)   36928       ['activation_1[0][0]']           
                                                                                                  
 batch_normalization_2 (BatchNo  (None, 55, 55, 64)  256         ['conv2d_2[0][0]']               
 rmalization)                                                                                     
                                                                                                  
 activation_2 (Activation)      (None, 55, 55, 64)   0           ['batch_normalization_2[0][0]']  
                                                                                                  
 conv2d_3 (Conv2D)              (None, 55, 55, 256)  16640       ['activation_2[0][0]']           
                                                                                                  
 conv2d_4 (Conv2D)              (None, 55, 55, 256)  16640       ['max_pooling2d[0][0]']          
                                                                                                  
 batch_normalization_3 (BatchNo  (None, 55, 55, 256)  1024       ['conv2d_3[0][0]']               
 rmalization)                                                                                     
                                                                                                  
 batch_normalization_4 (BatchNo  (None, 55, 55, 256)  1024       ['conv2d_4[0][0]']               
 rmalization)                                                                                     
                                                                                                  
 add (Add)                      (None, 55, 55, 256)  0           ['batch_normalization_3[0][0]',  
                                                                  'batch_normalization_4[0][0]']  
                                                                                                  
 activation_3 (Activation)      (None, 55, 55, 256)  0           ['add[0][0]']                    
                                                                                                  
 conv2d_5 (Conv2D)              (None, 55, 55, 64)   16448       ['activation_3[0][0]']           
                                                                                                  
 batch_normalization_5 (BatchNo  (None, 55, 55, 64)  256         ['conv2d_5[0][0]']               
 rmalization)                                                                                     
                                                                                                  
 activation_4 (Activation)      (None, 55, 55, 64)   0           ['batch_normalization_5[0][0]']  
                                                                                                  
 conv2d_6 (Conv2D)              (None, 55, 55, 64)   36928       ['activation_4[0][0]']           
                                                                                                  
 batch_normalization_6 (BatchNo  (None, 55, 55, 64)  256         ['conv2d_6[0][0]']               
 rmalization)                                                                                     
                                                                                                  
 activation_5 (Activation)      (None, 55, 55, 64)   0           ['batch_normalization_6[0][0]']  
                                                                                                  
 conv2d_7 (Conv2D)              (None, 55, 55, 256)  16640       ['activation_5[0][0]']           
                                                                                                  
 batch_normalization_7 (BatchNo  (None, 55, 55, 256)  1024       ['conv2d_7[0][0]']               
 rmalization)                                                                                     
                                                                                                  
 add_1 (Add)                    (None, 55, 55, 256)  0           ['batch_normalization_7[0][0]',  
                                                                  'activation_3[0][0]']           
                                                                                                  
 activation_6 (Activation)      (None, 55, 55, 256)  0           ['add_1[0][0]']                  
                                                                                                  
 conv2d_8 (Conv2D)              (None, 55, 55, 64)   16448       ['activation_6[0][0]']           
                                                                                                  
 batch_normalization_8 (BatchNo  (None, 55, 55, 64)  256         ['conv2d_8[0][0]']               
 rmalization)                                                                                     
                                                                                                  
 activation_7 (Activation)      (None, 55, 55, 64)   0           ['batch_normalization_8[0][0]']  
                                                                                                  
 conv2d_9 (Conv2D)              (None, 55, 55, 64)   36928       ['activation_7[0][0]']           
                                                                                                  
 batch_normalization_9 (BatchNo  (None, 55, 55, 64)  256         ['conv2d_9[0][0]']               
 rmalization)                                                                                     
                                                                                                  
 activation_8 (Activation)      (None, 55, 55, 64)   0           ['batch_normalization_9[0][0]']  
                                                                                                  
 conv2d_10 (Conv2D)             (None, 55, 55, 256)  16640       ['activation_8[0][0]']           
                                                                                                  
 batch_normalization_10 (BatchN  (None, 55, 55, 256)  1024       ['conv2d_10[0][0]']              
 ormalization)                                                                                    
                                                                                                  
 add_2 (Add)                    (None, 55, 55, 256)  0           ['batch_normalization_10[0][0]', 
                                                                  'activation_6[0][0]']           
                                                                                                  
 activation_9 (Activation)      (None, 55, 55, 256)  0           ['add_2[0][0]']                  
                                                                                                  
 conv2d_11 (Conv2D)             (None, 28, 28, 128)  32896       ['activation_9[0][0]']           
                                                                                                  
 batch_normalization_11 (BatchN  (None, 28, 28, 128)  512        ['conv2d_11[0][0]']              
 ormalization)                                                                                    
                                                                                                  
 activation_10 (Activation)     (None, 28, 28, 128)  0           ['batch_normalization_11[0][0]'] 
                                                                                                  
 conv2d_12 (Conv2D)             (None, 28, 28, 128)  147584      ['activation_10[0][0]']          
                                                                                                  
 batch_normalization_12 (BatchN  (None, 28, 28, 128)  512        ['conv2d_12[0][0]']              
 ormalization)                                                                                    
                                                                                                  
 activation_11 (Activation)     (None, 28, 28, 128)  0           ['batch_normalization_12[0][0]'] 
                                                                                                  
 conv2d_13 (Conv2D)             (None, 28, 28, 512)  66048       ['activation_11[0][0]']          
                                                                                                  
 conv2d_14 (Conv2D)             (None, 28, 28, 512)  131584      ['activation_9[0][0]']           
                                                                                                  
 batch_normalization_13 (BatchN  (None, 28, 28, 512)  2048       ['conv2d_13[0][0]']              
 ormalization)                                                                                    
                                                                                                  
 batch_normalization_14 (BatchN  (None, 28, 28, 512)  2048       ['conv2d_14[0][0]']              
 ormalization)                                                                                    
                                                                                                  
 add_3 (Add)                    (None, 28, 28, 512)  0           ['batch_normalization_13[0][0]', 
                                                                  'batch_normalization_14[0][0]'] 
                                                                                                  
 activation_12 (Activation)     (None, 28, 28, 512)  0           ['add_3[0][0]']                  
                                                                                                  
 conv2d_15 (Conv2D)             (None, 28, 28, 128)  65664       ['activation_12[0][0]']          
                                                                                                  
 batch_normalization_15 (BatchN  (None, 28, 28, 128)  512        ['conv2d_15[0][0]']              
 ormalization)                                                                                    
                                                                                                  
 activation_13 (Activation)     (None, 28, 28, 128)  0           ['batch_normalization_15[0][0]'] 
                                                                                                  
 conv2d_16 (Conv2D)             (None, 28, 28, 128)  147584      ['activation_13[0][0]']          
                                                                                                  
 batch_normalization_16 (BatchN  (None, 28, 28, 128)  512        ['conv2d_16[0][0]']              
 ormalization)                                                                                    
                                                                                                  
 activation_14 (Activation)     (None, 28, 28, 128)  0           ['batch_normalization_16[0][0]'] 
                                                                                                  
 conv2d_17 (Conv2D)             (None, 28, 28, 512)  66048       ['activation_14[0][0]']          
                                                                                                  
 batch_normalization_17 (BatchN  (None, 28, 28, 512)  2048       ['conv2d_17[0][0]']              
 ormalization)                                                                                    
                                                                                                  
 add_4 (Add)                    (None, 28, 28, 512)  0           ['batch_normalization_17[0][0]', 
                                                                  'activation_12[0][0]']          
                                                                                                  
 activation_15 (Activation)     (None, 28, 28, 512)  0           ['add_4[0][0]']                  
                                                                                                  
 conv2d_18 (Conv2D)             (None, 28, 28, 128)  65664       ['activation_15[0][0]']          
                                                                                                  
 batch_normalization_18 (BatchN  (None, 28, 28, 128)  512        ['conv2d_18[0][0]']              
 ormalization)                                                                                    
                                                                                                  
 activation_16 (Activation)     (None, 28, 28, 128)  0           ['batch_normalization_18[0][0]'] 
                                                                                                  
 conv2d_19 (Conv2D)             (None, 28, 28, 128)  147584      ['activation_16[0][0]']          
                                                                                                  
 batch_normalization_19 (BatchN  (None, 28, 28, 128)  512        ['conv2d_19[0][0]']              
 ormalization)                                                                                    
                                                                                                  
 activation_17 (Activation)     (None, 28, 28, 128)  0           ['batch_normalization_19[0][0]'] 
                                                                                                  
 conv2d_20 (Conv2D)             (None, 28, 28, 512)  66048       ['activation_17[0][0]']          
                                                                                                  
 batch_normalization_20 (BatchN  (None, 28, 28, 512)  2048       ['conv2d_20[0][0]']              
 ormalization)                                                                                    
                                                                                                  
 add_5 (Add)                    (None, 28, 28, 512)  0           ['batch_normalization_20[0][0]', 
                                                                  'activation_15[0][0]']          
                                                                                                  
 activation_18 (Activation)     (None, 28, 28, 512)  0           ['add_5[0][0]']                  
                                                                                                  
 conv2d_21 (Conv2D)             (None, 28, 28, 128)  65664       ['activation_18[0][0]']          
                                                                                                  
 batch_normalization_21 (BatchN  (None, 28, 28, 128)  512        ['conv2d_21[0][0]']              
 ormalization)                                                                                    
                                                                                                  
 activation_19 (Activation)     (None, 28, 28, 128)  0           ['batch_normalization_21[0][0]'] 
                                                                                                  
 conv2d_22 (Conv2D)             (None, 28, 28, 128)  147584      ['activation_19[0][0]']          
                                                                                                  
 batch_normalization_22 (BatchN  (None, 28, 28, 128)  512        ['conv2d_22[0][0]']              
 ormalization)                                                                                    
                                                                                                  
 activation_20 (Activation)     (None, 28, 28, 128)  0           ['batch_normalization_22[0][0]'] 
                                                                                                  
 conv2d_23 (Conv2D)             (None, 28, 28, 512)  66048       ['activation_20[0][0]']          
                                                                                                  
 batch_normalization_23 (BatchN  (None, 28, 28, 512)  2048       ['conv2d_23[0][0]']              
 ormalization)                                                                                    
                                                                                                  
 add_6 (Add)                    (None, 28, 28, 512)  0           ['batch_normalization_23[0][0]', 
                                                                  'activation_18[0][0]']          
                                                                                                  
 activation_21 (Activation)     (None, 28, 28, 512)  0           ['add_6[0][0]']                  
                                                                                                  
 conv2d_24 (Conv2D)             (None, 14, 14, 256)  131328      ['activation_21[0][0]']          
                                                                                                  
 batch_normalization_24 (BatchN  (None, 14, 14, 256)  1024       ['conv2d_24[0][0]']              
 ormalization)                                                                                    
                                                                                                  
 activation_22 (Activation)     (None, 14, 14, 256)  0           ['batch_normalization_24[0][0]'] 
                                                                                                  
 conv2d_25 (Conv2D)             (None, 14, 14, 256)  590080      ['activation_22[0][0]']          
                                                                                                  
 batch_normalization_25 (BatchN  (None, 14, 14, 256)  1024       ['conv2d_25[0][0]']              
 ormalization)                                                                                    
                                                                                                  
 activation_23 (Activation)     (None, 14, 14, 256)  0           ['batch_normalization_25[0][0]'] 
                                                                                                  
 conv2d_26 (Conv2D)             (None, 14, 14, 1024  263168      ['activation_23[0][0]']          
                                )                                                                 
                                                                                                  
 conv2d_27 (Conv2D)             (None, 14, 14, 1024  525312      ['activation_21[0][0]']          
                                )                                                                 
                                                                                                  
 batch_normalization_26 (BatchN  (None, 14, 14, 1024  4096       ['conv2d_26[0][0]']              
 ormalization)                  )                                                                 
                                                                                                  
 batch_normalization_27 (BatchN  (None, 14, 14, 1024  4096       ['conv2d_27[0][0]']              
 ormalization)                  )                                                                 
                                                                                                  
 add_7 (Add)                    (None, 14, 14, 1024  0           ['batch_normalization_26[0][0]', 
                                )                                 'batch_normalization_27[0][0]'] 
                                                                                                  
 activation_24 (Activation)     (None, 14, 14, 1024  0           ['add_7[0][0]']                  
                                )                                                                 
                                                                                                  
 conv2d_28 (Conv2D)             (None, 14, 14, 256)  262400      ['activation_24[0][0]']          
                                                                                                  
 batch_normalization_28 (BatchN  (None, 14, 14, 256)  1024       ['conv2d_28[0][0]']              
 ormalization)                                                                                    
                                                                                                  
 activation_25 (Activation)     (None, 14, 14, 256)  0           ['batch_normalization_28[0][0]'] 
                                                                                                  
 conv2d_29 (Conv2D)             (None, 14, 14, 256)  590080      ['activation_25[0][0]']          
                                                                                                  
 batch_normalization_29 (BatchN  (None, 14, 14, 256)  1024       ['conv2d_29[0][0]']              
 ormalization)                                                                                    
                                                                                                  
 activation_26 (Activation)     (None, 14, 14, 256)  0           ['batch_normalization_29[0][0]'] 
                                                                                                  
 conv2d_30 (Conv2D)             (None, 14, 14, 1024  263168      ['activation_26[0][0]']          
                                )                                                                 
                                                                                                  
 batch_normalization_30 (BatchN  (None, 14, 14, 1024  4096       ['conv2d_30[0][0]']              
 ormalization)                  )                                                                 
                                                                                                  
 add_8 (Add)                    (None, 14, 14, 1024  0           ['batch_normalization_30[0][0]', 
                                )                                 'activation_24[0][0]']          
                                                                                                  
 activation_27 (Activation)     (None, 14, 14, 1024  0           ['add_8[0][0]']                  
                                )                                                                 
                                                                                                  
 conv2d_31 (Conv2D)             (None, 14, 14, 256)  262400      ['activation_27[0][0]']          
                                                                                                  
 batch_normalization_31 (BatchN  (None, 14, 14, 256)  1024       ['conv2d_31[0][0]']              
 ormalization)                                                                                    
                                                                                                  
 activation_28 (Activation)     (None, 14, 14, 256)  0           ['batch_normalization_31[0][0]'] 
                                                                                                  
 conv2d_32 (Conv2D)             (None, 14, 14, 256)  590080      ['activation_28[0][0]']          
                                                                                                  
 batch_normalization_32 (BatchN  (None, 14, 14, 256)  1024       ['conv2d_32[0][0]']              
 ormalization)                                                                                    
                                                                                                  
 activation_29 (Activation)     (None, 14, 14, 256)  0           ['batch_normalization_32[0][0]'] 
                                                                                                  
 conv2d_33 (Conv2D)             (None, 14, 14, 1024  263168      ['activation_29[0][0]']          
                                )                                                                 
                                                                                                  
 batch_normalization_33 (BatchN  (None, 14, 14, 1024  4096       ['conv2d_33[0][0]']              
 ormalization)                  )                                                                 
                                                                                                  
 add_9 (Add)                    (None, 14, 14, 1024  0           ['batch_normalization_33[0][0]', 
                                )                                 'activation_27[0][0]']          
                                                                                                  
 activation_30 (Activation)     (None, 14, 14, 1024  0           ['add_9[0][0]']                  
                                )                                                                 
                                                                                                  
 conv2d_34 (Conv2D)             (None, 14, 14, 256)  262400      ['activation_30[0][0]']          
                                                                                                  
 batch_normalization_34 (BatchN  (None, 14, 14, 256)  1024       ['conv2d_34[0][0]']              
 ormalization)                                                                                    
                                                                                                  
 activation_31 (Activation)     (None, 14, 14, 256)  0           ['batch_normalization_34[0][0]'] 
                                                                                                  
 conv2d_35 (Conv2D)             (None, 14, 14, 256)  590080      ['activation_31[0][0]']          
                                                                                                  
 batch_normalization_35 (BatchN  (None, 14, 14, 256)  1024       ['conv2d_35[0][0]']              
 ormalization)                                                                                    
                                                                                                  
 activation_32 (Activation)     (None, 14, 14, 256)  0           ['batch_normalization_35[0][0]'] 
                                                                                                  
 conv2d_36 (Conv2D)             (None, 14, 14, 1024  263168      ['activation_32[0][0]']          
                                )                                                                 
                                                                                                  
 batch_normalization_36 (BatchN  (None, 14, 14, 1024  4096       ['conv2d_36[0][0]']              
 ormalization)                  )                                                                 
                                                                                                  
 add_10 (Add)                   (None, 14, 14, 1024  0           ['batch_normalization_36[0][0]', 
                                )                                 'activation_30[0][0]']          
                                                                                                  
 activation_33 (Activation)     (None, 14, 14, 1024  0           ['add_10[0][0]']                 
                                )                                                                 
                                                                                                  
 conv2d_37 (Conv2D)             (None, 14, 14, 256)  262400      ['activation_33[0][0]']          
                                                                                                  
 batch_normalization_37 (BatchN  (None, 14, 14, 256)  1024       ['conv2d_37[0][0]']              
 ormalization)                                                                                    
                                                                                                  
 activation_34 (Activation)     (None, 14, 14, 256)  0           ['batch_normalization_37[0][0]'] 
                                                                                                  
 conv2d_38 (Conv2D)             (None, 14, 14, 256)  590080      ['activation_34[0][0]']          
                                                                                                  
 batch_normalization_38 (BatchN  (None, 14, 14, 256)  1024       ['conv2d_38[0][0]']              
 ormalization)                                                                                    
                                                                                                  
 activation_35 (Activation)     (None, 14, 14, 256)  0           ['batch_normalization_38[0][0]'] 
                                                                                                  
 conv2d_39 (Conv2D)             (None, 14, 14, 1024  263168      ['activation_35[0][0]']          
                                )                                                                 
                                                                                                  
 batch_normalization_39 (BatchN  (None, 14, 14, 1024  4096       ['conv2d_39[0][0]']              
 ormalization)                  )                                                                 
                                                                                                  
 add_11 (Add)                   (None, 14, 14, 1024  0           ['batch_normalization_39[0][0]', 
                                )                                 'activation_33[0][0]']          
                                                                                                  
 activation_36 (Activation)     (None, 14, 14, 1024  0           ['add_11[0][0]']                 
                                )                                                                 
                                                                                                  
 conv2d_40 (Conv2D)             (None, 14, 14, 256)  262400      ['activation_36[0][0]']          
                                                                                                  
 batch_normalization_40 (BatchN  (None, 14, 14, 256)  1024       ['conv2d_40[0][0]']              
 ormalization)                                                                                    
                                                                                                  
 activation_37 (Activation)     (None, 14, 14, 256)  0           ['batch_normalization_40[0][0]'] 
                                                                                                  
 conv2d_41 (Conv2D)             (None, 14, 14, 256)  590080      ['activation_37[0][0]']          
                                                                                                  
 batch_normalization_41 (BatchN  (None, 14, 14, 256)  1024       ['conv2d_41[0][0]']              
 ormalization)                                                                                    
                                                                                                  
 activation_38 (Activation)     (None, 14, 14, 256)  0           ['batch_normalization_41[0][0]'] 
                                                                                                  
 conv2d_42 (Conv2D)             (None, 14, 14, 1024  263168      ['activation_38[0][0]']          
                                )                                                                 
                                                                                                  
 batch_normalization_42 (BatchN  (None, 14, 14, 1024  4096       ['conv2d_42[0][0]']              
 ormalization)                  )                                                                 
                                                                                                  
 add_12 (Add)                   (None, 14, 14, 1024  0           ['batch_normalization_42[0][0]', 
                                )                                 'activation_36[0][0]']          
                                                                                                  
 activation_39 (Activation)     (None, 14, 14, 1024  0           ['add_12[0][0]']                 
                                )                                                                 
                                                                                                  
 conv2d_43 (Conv2D)             (None, 7, 7, 512)    524800      ['activation_39[0][0]']          
                                                                                                  
 batch_normalization_43 (BatchN  (None, 7, 7, 512)   2048        ['conv2d_43[0][0]']              
 ormalization)                                                                                    
                                                                                                  
 activation_40 (Activation)     (None, 7, 7, 512)    0           ['batch_normalization_43[0][0]'] 
                                                                                                  
 conv2d_44 (Conv2D)             (None, 7, 7, 512)    2359808     ['activation_40[0][0]']          
                                                                                                  
 batch_normalization_44 (BatchN  (None, 7, 7, 512)   2048        ['conv2d_44[0][0]']              
 ormalization)                                                                                    
                                                                                                  
 activation_41 (Activation)     (None, 7, 7, 512)    0           ['batch_normalization_44[0][0]'] 
                                                                                                  
 conv2d_45 (Conv2D)             (None, 7, 7, 2048)   1050624     ['activation_41[0][0]']          
                                                                                                  
 conv2d_46 (Conv2D)             (None, 7, 7, 2048)   2099200     ['activation_39[0][0]']          
                                                                                                  
 batch_normalization_45 (BatchN  (None, 7, 7, 2048)  8192        ['conv2d_45[0][0]']              
 ormalization)                                                                                    
                                                                                                  
 batch_normalization_46 (BatchN  (None, 7, 7, 2048)  8192        ['conv2d_46[0][0]']              
 ormalization)                                                                                    
                                                                                                  
 add_13 (Add)                   (None, 7, 7, 2048)   0           ['batch_normalization_45[0][0]', 
                                                                  'batch_normalization_46[0][0]'] 
                                                                                                  
 activation_42 (Activation)     (None, 7, 7, 2048)   0           ['add_13[0][0]']                 
                                                                                                  
 conv2d_47 (Conv2D)             (None, 7, 7, 512)    1049088     ['activation_42[0][0]']          
                                                                                                  
 batch_normalization_47 (BatchN  (None, 7, 7, 512)   2048        ['conv2d_47[0][0]']              
 ormalization)                                                                                    
                                                                                                  
 activation_43 (Activation)     (None, 7, 7, 512)    0           ['batch_normalization_47[0][0]'] 
                                                                                                  
 conv2d_48 (Conv2D)             (None, 7, 7, 512)    2359808     ['activation_43[0][0]']          
                                                                                                  
 batch_normalization_48 (BatchN  (None, 7, 7, 512)   2048        ['conv2d_48[0][0]']              
 ormalization)                                                                                    
                                                                                                  
 activation_44 (Activation)     (None, 7, 7, 512)    0           ['batch_normalization_48[0][0]'] 
                                                                                                  
 conv2d_49 (Conv2D)             (None, 7, 7, 2048)   1050624     ['activation_44[0][0]']          
                                                                                                  
 batch_normalization_49 (BatchN  (None, 7, 7, 2048)  8192        ['conv2d_49[0][0]']              
 ormalization)                                                                                    
                                                                                                  
 add_14 (Add)                   (None, 7, 7, 2048)   0           ['batch_normalization_49[0][0]', 
                                                                  'activation_42[0][0]']          
                                                                                                  
 activation_45 (Activation)     (None, 7, 7, 2048)   0           ['add_14[0][0]']                 
                                                                                                  
 conv2d_50 (Conv2D)             (None, 7, 7, 512)    1049088     ['activation_45[0][0]']          
                                                                                                  
 batch_normalization_50 (BatchN  (None, 7, 7, 512)   2048        ['conv2d_50[0][0]']              
 ormalization)                                                                                    
                                                                                                  
 activation_46 (Activation)     (None, 7, 7, 512)    0           ['batch_normalization_50[0][0]'] 
                                                                                                  
 conv2d_51 (Conv2D)             (None, 7, 7, 512)    2359808     ['activation_46[0][0]']          
                                                                                                  
 batch_normalization_51 (BatchN  (None, 7, 7, 512)   2048        ['conv2d_51[0][0]']              
 ormalization)                                                                                    
                                                                                                  
 activation_47 (Activation)     (None, 7, 7, 512)    0           ['batch_normalization_51[0][0]'] 
                                                                                                  
 conv2d_52 (Conv2D)             (None, 7, 7, 2048)   1050624     ['activation_47[0][0]']          
                                                                                                  
 batch_normalization_52 (BatchN  (None, 7, 7, 2048)  8192        ['conv2d_52[0][0]']              
 ormalization)                                                                                    
                                                                                                  
 add_15 (Add)                   (None, 7, 7, 2048)   0           ['batch_normalization_52[0][0]', 
                                                                  'activation_45[0][0]']          
                                                                                                  
 activation_48 (Activation)     (None, 7, 7, 2048)   0           ['add_15[0][0]']                 
                                                                                                  
 average_pooling2d (AveragePool  (None, 4, 4, 2048)  0           ['activation_48[0][0]']          
 ing2D)                                                                                           
                                                                                                  
 flatten (Flatten)              (None, 32768)        0           ['average_pooling2d[0][0]']      
                                                                                                  
 dense (Dense)                  (None, 196)          6422724     ['flatten[0][0]']                
                                                                                                  
==================================================================================================
Total params: 30,010,436
Trainable params: 29,957,316
Non-trainable params: 53,120
__________________________________________________________________________________________________
In [ ]:
visualkeras_view(base_model_3)
Out[ ]:
In [ ]:
# Fit the model
base_model_3_history=base_model_3.fit(X_train, y_train, validation_data=(X_test, y_test) , epochs=20, batch_size=50, verbose=1, callbacks=create_callbacks('base_model_3'))
Epoch 1/20
163/163 [==============================] - ETA: 0s - loss: 25.5179 - accuracy: 0.0068
Epoch 1: saving model to base_model_3_weights.h5
163/163 [==============================] - 2072s 13s/step - loss: 25.5179 - accuracy: 0.0068 - val_loss: 20.2788 - val_accuracy: 0.0051 - lr: 0.0010
Epoch 2/20
163/163 [==============================] - ETA: 0s - loss: 18.1506 - accuracy: 0.0107
Epoch 2: saving model to base_model_3_weights.h5
163/163 [==============================] - 2100s 13s/step - loss: 18.1506 - accuracy: 0.0107 - val_loss: 16.3934 - val_accuracy: 0.0057 - lr: 0.0010
Epoch 3/20
163/163 [==============================] - ETA: 0s - loss: 14.7518 - accuracy: 0.0332 
Epoch 3: saving model to base_model_3_weights.h5
163/163 [==============================] - 3062s 19s/step - loss: 14.7518 - accuracy: 0.0332 - val_loss: 13.7801 - val_accuracy: 0.0175 - lr: 0.0010
Epoch 4/20
163/163 [==============================] - ETA: 0s - loss: 12.3707 - accuracy: 0.0614 
Epoch 4: saving model to base_model_3_weights.h5
163/163 [==============================] - 3092s 19s/step - loss: 12.3707 - accuracy: 0.0614 - val_loss: 11.8351 - val_accuracy: 0.0414 - lr: 0.0010
Epoch 5/20
163/163 [==============================] - ETA: 0s - loss: 10.6814 - accuracy: 0.0965 
Epoch 5: saving model to base_model_3_weights.h5
163/163 [==============================] - 3425s 21s/step - loss: 10.6814 - accuracy: 0.0965 - val_loss: 10.6682 - val_accuracy: 0.0556 - lr: 0.0010
Epoch 6/20
163/163 [==============================] - ETA: 0s - loss: 9.6998 - accuracy: 0.1195 
Epoch 6: saving model to base_model_3_weights.h5
163/163 [==============================] - 4765s 29s/step - loss: 9.6998 - accuracy: 0.1195 - val_loss: 10.1946 - val_accuracy: 0.0507 - lr: 0.0010
Epoch 7/20
163/163 [==============================] - ETA: 0s - loss: 8.5428 - accuracy: 0.1864 
Epoch 7: saving model to base_model_3_weights.h5
163/163 [==============================] - 2765s 17s/step - loss: 8.5428 - accuracy: 0.1864 - val_loss: 9.3786 - val_accuracy: 0.0698 - lr: 0.0010
Epoch 8/20
163/163 [==============================] - ETA: 0s - loss: 7.5018 - accuracy: 0.2692 
Epoch 8: saving model to base_model_3_weights.h5
163/163 [==============================] - 2569s 16s/step - loss: 7.5018 - accuracy: 0.2692 - val_loss: 8.8753 - val_accuracy: 0.0854 - lr: 0.0010
Epoch 9/20
163/163 [==============================] - ETA: 0s - loss: 6.5529 - accuracy: 0.3821 
Epoch 9: saving model to base_model_3_weights.h5
163/163 [==============================] - 2953s 18s/step - loss: 6.5529 - accuracy: 0.3821 - val_loss: 8.8275 - val_accuracy: 0.0800 - lr: 0.0010
Epoch 10/20
163/163 [==============================] - ETA: 0s - loss: 5.6983 - accuracy: 0.5032 
Epoch 10: saving model to base_model_3_weights.h5
163/163 [==============================] - 3211s 20s/step - loss: 5.6983 - accuracy: 0.5032 - val_loss: 8.6039 - val_accuracy: 0.1014 - lr: 0.0010
Epoch 11/20
163/163 [==============================] - ETA: 0s - loss: 4.7620 - accuracy: 0.6567 
Epoch 11: saving model to base_model_3_weights.h5
163/163 [==============================] - 2923s 18s/step - loss: 4.7620 - accuracy: 0.6567 - val_loss: 8.8858 - val_accuracy: 0.0878 - lr: 0.0010
Epoch 12/20
163/163 [==============================] - ETA: 0s - loss: 3.9849 - accuracy: 0.8042 
Epoch 12: saving model to base_model_3_weights.h5
163/163 [==============================] - 2647s 16s/step - loss: 3.9849 - accuracy: 0.8042 - val_loss: 9.1994 - val_accuracy: 0.0938 - lr: 0.0010
Epoch 13/20
163/163 [==============================] - ETA: 0s - loss: 3.3583 - accuracy: 0.9570 
Epoch 13: saving model to base_model_3_weights.h5
163/163 [==============================] - 2852s 17s/step - loss: 3.3583 - accuracy: 0.9570 - val_loss: 9.2188 - val_accuracy: 0.1229 - lr: 1.0000e-04
Epoch 14/20
163/163 [==============================] - ETA: 0s - loss: 3.1835 - accuracy: 0.9883 
Epoch 14: saving model to base_model_3_weights.h5
163/163 [==============================] - 3032s 19s/step - loss: 3.1835 - accuracy: 0.9883 - val_loss: 9.7060 - val_accuracy: 0.1270 - lr: 1.0000e-04
Epoch 15/20
163/163 [==============================] - ETA: 0s - loss: 3.1340 - accuracy: 0.9953 
Epoch 15: saving model to base_model_3_weights.h5
163/163 [==============================] - 3482s 21s/step - loss: 3.1340 - accuracy: 0.9953 - val_loss: 10.0445 - val_accuracy: 0.1271 - lr: 1.0000e-05
Epoch 16/20
163/163 [==============================] - ETA: 0s - loss: 3.1245 - accuracy: 0.9963 
Epoch 16: saving model to base_model_3_weights.h5
163/163 [==============================] - 2978s 18s/step - loss: 3.1245 - accuracy: 0.9963 - val_loss: 10.1494 - val_accuracy: 0.1271 - lr: 1.0000e-05
Epoch 17/20
163/163 [==============================] - ETA: 0s - loss: 3.1210 - accuracy: 0.9953 
Epoch 17: saving model to base_model_3_weights.h5
163/163 [==============================] - 2867s 18s/step - loss: 3.1210 - accuracy: 0.9953 - val_loss: 10.2173 - val_accuracy: 0.1266 - lr: 1.0000e-05
Epoch 18/20
163/163 [==============================] - ETA: 0s - loss: 3.1168 - accuracy: 0.9948 
Epoch 18: saving model to base_model_3_weights.h5
163/163 [==============================] - 2776s 17s/step - loss: 3.1168 - accuracy: 0.9948 - val_loss: 10.2252 - val_accuracy: 0.1272 - lr: 1.0000e-05
In [ ]:
# create an iterator object with write permission - base_model_2.pkl
with open('base_model_3.pkl', 'wb') as files:
    pickle.dump(base_model_3, files)
Keras weights file (<HDF5 file "variables.h5" (mode r+)>) saving:
...layers
......activation
.........vars
......activation_1
.........vars
......activation_10
.........vars
......activation_11
.........vars
......activation_12
.........vars
......activation_13
.........vars
......activation_14
.........vars
......activation_15
.........vars
......activation_16
.........vars
......activation_17
.........vars
......activation_18
.........vars
......activation_19
.........vars
......activation_2
.........vars
......activation_20
.........vars
......activation_21
.........vars
......activation_22
.........vars
......activation_23
.........vars
......activation_24
.........vars
......activation_25
.........vars
......activation_26
.........vars
......activation_27
.........vars
......activation_28
.........vars
......activation_29
.........vars
......activation_3
.........vars
......activation_30
.........vars
......activation_31
.........vars
......activation_32
.........vars
......activation_33
.........vars
......activation_34
.........vars
......activation_35
.........vars
......activation_36
.........vars
......activation_37
.........vars
......activation_38
.........vars
......activation_39
.........vars
......activation_4
.........vars
......activation_40
.........vars
......activation_41
.........vars
......activation_42
.........vars
......activation_43
.........vars
......activation_44
.........vars
......activation_45
.........vars
......activation_46
.........vars
......activation_47
.........vars
......activation_48
.........vars
......activation_5
.........vars
......activation_6
.........vars
......activation_7
.........vars
......activation_8
.........vars
......activation_9
.........vars
......add
.........vars
......add_1
.........vars
......add_10
.........vars
......add_11
.........vars
......add_12
.........vars
......add_13
.........vars
......add_14
.........vars
......add_15
.........vars
......add_2
.........vars
......add_3
.........vars
......add_4
.........vars
......add_5
.........vars
......add_6
.........vars
......add_7
.........vars
......add_8
.........vars
......add_9
.........vars
......average_pooling2d
.........vars
......batch_normalization
.........vars
............0
............1
............2
............3
......batch_normalization_1
.........vars
............0
............1
............2
............3
......batch_normalization_10
.........vars
............0
............1
............2
............3
......batch_normalization_11
.........vars
............0
............1
............2
............3
......batch_normalization_12
.........vars
............0
............1
............2
............3
......batch_normalization_13
.........vars
............0
............1
............2
............3
......batch_normalization_14
.........vars
............0
............1
............2
............3
......batch_normalization_15
.........vars
............0
............1
............2
............3
......batch_normalization_16
.........vars
............0
............1
............2
............3
......batch_normalization_17
.........vars
............0
............1
............2
............3
......batch_normalization_18
.........vars
............0
............1
............2
............3
......batch_normalization_19
.........vars
............0
............1
............2
............3
......batch_normalization_2
.........vars
............0
............1
............2
............3
......batch_normalization_20
.........vars
............0
............1
............2
............3
......batch_normalization_21
.........vars
............0
............1
............2
............3
......batch_normalization_22
.........vars
............0
............1
............2
............3
......batch_normalization_23
.........vars
............0
............1
............2
............3
......batch_normalization_24
.........vars
............0
............1
............2
............3
......batch_normalization_25
.........vars
............0
............1
............2
............3
......batch_normalization_26
.........vars
............0
............1
............2
............3
......batch_normalization_27
.........vars
............0
............1
............2
............3
......batch_normalization_28
.........vars
............0
............1
............2
............3
......batch_normalization_29
.........vars
............0
............1
............2
............3
......batch_normalization_3
.........vars
............0
............1
............2
............3
......batch_normalization_30
.........vars
............0
............1
............2
............3
......batch_normalization_31
.........vars
............0
............1
............2
............3
......batch_normalization_32
.........vars
............0
............1
............2
............3
......batch_normalization_33
.........vars
............0
............1
............2
............3
......batch_normalization_34
.........vars
............0
............1
............2
............3
......batch_normalization_35
.........vars
............0
............1
............2
............3
......batch_normalization_36
.........vars
............0
............1
............2
............3
......batch_normalization_37
.........vars
............0
............1
............2
............3
......batch_normalization_38
.........vars
............0
............1
............2
............3
......batch_normalization_39
.........vars
............0
............1
............2
............3
......batch_normalization_4
.........vars
............0
............1
............2
............3
......batch_normalization_40
.........vars
............0
............1
............2
............3
......batch_normalization_41
.........vars
............0
............1
............2
............3
......batch_normalization_42
.........vars
............0
............1
............2
............3
......batch_normalization_43
.........vars
............0
............1
............2
............3
......batch_normalization_44
.........vars
............0
............1
............2
............3
......batch_normalization_45
.........vars
............0
............1
............2
............3
......batch_normalization_46
.........vars
............0
............1
............2
............3
......batch_normalization_47
.........vars
............0
............1
............2
............3
......batch_normalization_48
.........vars
............0
............1
............2
............3
......batch_normalization_49
.........vars
............0
............1
............2
............3
......batch_normalization_5
.........vars
............0
............1
............2
............3
......batch_normalization_50
.........vars
............0
............1
............2
............3
......batch_normalization_51
.........vars
............0
............1
............2
............3
......batch_normalization_52
.........vars
............0
............1
............2
............3
......batch_normalization_6
.........vars
............0
............1
............2
............3
......batch_normalization_7
.........vars
............0
............1
............2
............3
......batch_normalization_8
.........vars
............0
............1
............2
............3
......batch_normalization_9
.........vars
............0
............1
............2
............3
......conv2d
.........vars
............0
............1
......conv2d_1
.........vars
............0
............1
......conv2d_10
.........vars
............0
............1
......conv2d_11
.........vars
............0
............1
......conv2d_12
.........vars
............0
............1
......conv2d_13
.........vars
............0
............1
......conv2d_14
.........vars
............0
............1
......conv2d_15
.........vars
............0
............1
......conv2d_16
.........vars
............0
............1
......conv2d_17
.........vars
............0
............1
......conv2d_18
.........vars
............0
............1
......conv2d_19
.........vars
............0
............1
......conv2d_2
.........vars
............0
............1
......conv2d_20
.........vars
............0
............1
......conv2d_21
.........vars
............0
............1
......conv2d_22
.........vars
............0
............1
......conv2d_23
.........vars
............0
............1
......conv2d_24
.........vars
............0
............1
......conv2d_25
.........vars
............0
............1
......conv2d_26
.........vars
............0
............1
......conv2d_27
.........vars
............0
............1
......conv2d_28
.........vars
............0
............1
......conv2d_29
.........vars
............0
............1
......conv2d_3
.........vars
............0
............1
......conv2d_30
.........vars
............0
............1
......conv2d_31
.........vars
............0
............1
......conv2d_32
.........vars
............0
............1
......conv2d_33
.........vars
............0
............1
......conv2d_34
.........vars
............0
............1
......conv2d_35
.........vars
............0
............1
......conv2d_36
.........vars
............0
............1
......conv2d_37
.........vars
............0
............1
......conv2d_38
.........vars
............0
............1
......conv2d_39
.........vars
............0
............1
......conv2d_4
.........vars
............0
............1
......conv2d_40
.........vars
............0
............1
......conv2d_41
.........vars
............0
............1
......conv2d_42
.........vars
............0
............1
......conv2d_43
.........vars
............0
............1
......conv2d_44
.........vars
............0
............1
......conv2d_45
.........vars
............0
............1
......conv2d_46
.........vars
............0
............1
......conv2d_47
.........vars
............0
............1
......conv2d_48
.........vars
............0
............1
......conv2d_49
.........vars
............0
............1
......conv2d_5
.........vars
............0
............1
......conv2d_50
.........vars
............0
............1
......conv2d_51
.........vars
............0
............1
......conv2d_52
.........vars
............0
............1
......conv2d_6
.........vars
............0
............1
......conv2d_7
.........vars
............0
............1
......conv2d_8
.........vars
............0
............1
......conv2d_9
.........vars
............0
............1
......dense
.........vars
............0
............1
......flatten
.........vars
......input_layer
.........vars
......max_pooling2d
.........vars
......zero_padding2d
.........vars
...metrics
......mean
.........vars
............0
............1
......mean_metric_wrapper
.........vars
............0
............1
...vars
Keras model archive saving:
File Name                                             Modified             Size
config.json                                    2023-06-02 19:46:50        86244
metadata.json                                  2023-06-02 19:46:50           64
variables.h5                                   2023-06-02 19:46:50    120502344
In [ ]:
metric_score(base_model_3_history,base_model_3,50,'base_model_3',model_list)
163/163 [==============================] - 483s 3s/step - loss: 3.0951 - accuracy: 0.9977
161/161 [==============================] - 508s 3s/step - loss: 10.2252 - accuracy: 0.1272
252/252 [==============================] - 527s 2s/step

Classification Matrix:
               precision    recall  f1-score   support

           0       0.23      0.32      0.27        44
           1       0.06      0.03      0.04        32
           2       0.19      0.12      0.14        43
           3       0.06      0.07      0.06        42
           4       0.07      0.05      0.06        40
           5       0.15      0.16      0.15        44
           6       0.25      0.18      0.21        39
           7       0.11      0.09      0.10        45
           8       0.09      0.10      0.09        41
           9       0.05      0.03      0.04        33
          10       0.50      0.42      0.46        38
          11       0.17      0.08      0.11        36
          12       0.07      0.12      0.09        41
          13       0.04      0.07      0.05        42
          14       0.11      0.09      0.10        43
          15       0.09      0.07      0.08        43
          16       0.13      0.12      0.13        40
          17       0.10      0.12      0.11        42
          18       0.02      0.03      0.02        40
          19       0.02      0.02      0.02        46
          20       0.10      0.07      0.08        42
          21       0.06      0.05      0.05        42
          22       0.07      0.05      0.06        39
          23       0.07      0.07      0.07        45
          24       0.15      0.13      0.14        39
          25       0.24      0.24      0.24        34
          26       0.07      0.09      0.08        35
          27       0.15      0.07      0.10        41
          28       0.03      0.02      0.02        42
          29       0.19      0.20      0.19        41
          30       0.09      0.07      0.08        44
          31       0.08      0.07      0.08        41
          32       0.03      0.02      0.03        42
          33       0.07      0.09      0.08        44
          34       0.08      0.07      0.08        41
          35       0.04      0.02      0.03        41
          36       0.10      0.13      0.12        38
          37       0.14      0.12      0.13        40
          38       0.19      0.22      0.21        36
          39       0.12      0.18      0.14        39
          40       0.13      0.09      0.10        35
          41       0.10      0.06      0.07        34
          42       0.06      0.04      0.05        46
          43       0.03      0.02      0.02        44
          44       0.17      0.22      0.19        32
          45       0.36      0.23      0.28        43
          46       0.12      0.09      0.10        35
          47       0.06      0.07      0.07        42
          48       0.09      0.08      0.08        37
          49       0.11      0.10      0.10        42
          50       0.11      0.14      0.12        43
          51       0.04      0.02      0.03        41
          52       0.16      0.09      0.12        44
          53       0.03      0.05      0.04        40
          54       0.11      0.21      0.14        39
          55       0.17      0.17      0.17        46
          56       0.18      0.16      0.17        37
          57       0.10      0.11      0.11        44
          58       0.04      0.05      0.04        44
          59       0.20      0.14      0.16        36
          60       0.04      0.05      0.04        43
          61       0.13      0.14      0.13        37
          62       0.04      0.05      0.04        44
          63       0.24      0.17      0.20        29
          64       0.08      0.07      0.07        45
          65       0.12      0.12      0.12        41
          66       0.06      0.05      0.06        38
          67       0.21      0.15      0.17        40
          68       0.15      0.18      0.16        38
          69       0.14      0.21      0.17        42
          70       0.10      0.06      0.07        35
          71       0.07      0.07      0.07        45
          72       0.02      0.02      0.02        44
          73       0.13      0.12      0.12        43
          74       0.05      0.05      0.05        44
          75       0.27      0.28      0.27        43
          76       0.09      0.07      0.08        40
          77       0.14      0.14      0.14        37
          78       0.09      0.12      0.11        48
          79       0.18      0.19      0.18        43
          80       0.17      0.20      0.19        45
          81       0.12      0.09      0.10        45
          82       0.06      0.05      0.06        40
          83       0.18      0.21      0.20        42
          84       0.21      0.16      0.18        43
          85       0.21      0.19      0.20        42
          86       0.10      0.09      0.09        44
          87       0.28      0.18      0.22        39
          88       0.27      0.16      0.20        44
          89       0.09      0.05      0.06        41
          90       0.11      0.11      0.11        38
          91       0.07      0.07      0.07        40
          92       0.27      0.21      0.23        39
          93       0.21      0.14      0.17        43
          94       0.08      0.11      0.10        45
          95       0.03      0.02      0.03        41
          96       0.05      0.07      0.06        42
          97       0.02      0.02      0.02        46
          98       0.38      0.22      0.28        27
          99       0.41      0.36      0.39        33
         100       0.25      0.19      0.22        42
         101       0.21      0.23      0.22        39
         102       0.11      0.13      0.12        39
         103       0.12      0.14      0.13        42
         104       0.06      0.05      0.05        43
         105       0.12      0.15      0.13        41
         106       0.02      0.02      0.02        44
         107       0.10      0.16      0.12        44
         108       0.17      0.20      0.19        44
         109       0.06      0.07      0.06        43
         110       0.15      0.17      0.16        42
         111       0.05      0.04      0.04        45
         112       0.07      0.07      0.07        42
         113       0.13      0.16      0.14        45
         114       0.05      0.07      0.06        45
         115       0.26      0.19      0.22        37
         116       0.05      0.05      0.05        42
         117       0.09      0.10      0.10        41
         118       0.26      0.28      0.27        68
         119       0.06      0.07      0.07        42
         120       0.06      0.05      0.05        44
         121       0.15      0.23      0.18        40
         122       0.44      0.36      0.40        44
         123       0.16      0.18      0.17        39
         124       0.18      0.19      0.18        43
         125       0.19      0.17      0.18        42
         126       0.21      0.15      0.17        41
         127       0.17      0.18      0.17        39
         128       0.08      0.13      0.10        38
         129       0.11      0.10      0.10        41
         130       0.11      0.10      0.10        42
         131       0.09      0.07      0.08        43
         132       0.03      0.02      0.02        42
         133       0.14      0.09      0.11        33
         134       0.08      0.07      0.07        42
         135       0.00      0.00      0.00        24
         136       0.14      0.09      0.11        43
         137       0.15      0.08      0.10        39
         138       0.04      0.07      0.05        42
         139       0.14      0.14      0.14        42
         140       0.20      0.15      0.17        34
         141       0.10      0.09      0.10        32
         142       0.17      0.17      0.17        40
         143       0.04      0.04      0.04        46
         144       0.09      0.09      0.09        44
         145       0.37      0.37      0.37        43
         146       0.14      0.16      0.15        44
         147       0.04      0.09      0.06        45
         148       0.13      0.14      0.14        42
         149       0.41      0.39      0.40        36
         150       0.18      0.16      0.17        43
         151       0.75      0.51      0.61        35
         152       0.40      0.43      0.41        44
         153       0.18      0.26      0.21        42
         154       0.14      0.14      0.14        42
         155       0.13      0.13      0.13        39
         156       0.33      0.33      0.33        36
         157       0.25      0.10      0.15        29
         158       0.24      0.17      0.20        36
         159       0.49      0.39      0.43        44
         160       0.07      0.15      0.09        48
         161       0.19      0.22      0.21        45
         162       0.21      0.19      0.20        36
         163       0.16      0.26      0.20        43
         164       0.08      0.11      0.10        44
         165       0.39      0.41      0.40        41
         166       0.07      0.11      0.09        47
         167       0.31      0.31      0.31        42
         168       0.11      0.11      0.11        38
         169       0.04      0.02      0.03        44
         170       0.05      0.07      0.06        46
         171       0.08      0.07      0.07        44
         172       0.10      0.09      0.09        43
         173       0.15      0.15      0.15        41
         174       0.05      0.03      0.04        30
         175       0.06      0.05      0.06        38
         176       0.16      0.16      0.16        44
         177       0.02      0.02      0.02        41
         178       0.16      0.16      0.16        45
         179       0.11      0.12      0.11        42
         180       0.17      0.16      0.16        38
         181       0.03      0.02      0.02        46
         182       0.06      0.07      0.07        42
         183       0.02      0.03      0.02        40
         184       0.06      0.05      0.06        38
         185       0.27      0.32      0.29        38
         186       0.07      0.07      0.07        43
         187       0.07      0.09      0.08        43
         188       0.19      0.12      0.15        40
         189       0.11      0.07      0.08        43
         190       0.08      0.07      0.07        46
         191       0.10      0.10      0.10        42
         192       0.07      0.10      0.08        41
         193       0.00      0.00      0.00        45
         194       0.08      0.07      0.07        43
         195       0.35      0.15      0.21        40

    accuracy                           0.13      8041
   macro avg       0.14      0.13      0.13      8041
weighted avg       0.13      0.13      0.13      8041

In [ ]:
pd.DataFrame(model_list,columns=['Model','Train_Loss','Test_Loss','Train_Accuracy','Test_Accuracy','Precision','Recall','F1 Score']).sort_values(by=['Recall','F1 Score'], ascending=False)
Out[ ]:
Model Train_Loss Test_Loss Train_Accuracy Test_Accuracy Precision Recall F1 Score
1 base_model_2 1.634652 3.697144 0.672028 0.196990 0.196990 0.196990 0.196990
2 base_model_3 3.095142 10.225157 0.997667 0.127223 0.127223 0.127223 0.127223
0 base_model_1 0.704541 9.876921 0.840864 0.027111 0.027111 0.027111 0.027111

Milestone 2¶

Step 1: Fine tune the trained basic CNN models to classify the car.¶

As from above model_list, we get to know base_model_2 did good in old model, so we will be using that model for tuning purpose.

Image Augmentation¶

In [ ]:
train_datagen=ImageDataGenerator(preprocessing_function=preprocess_input,
                                rotation_range=20,
                                shear_range = 0.2,
                                width_shift_range=0.2,
                                height_shift_range=0.1,
                                zoom_range=0.2,
                                horizontal_flip=True,
                                vertical_flip=True,
                                brightness_range=[0.1,1.0]
                                )

test_datagen=ImageDataGenerator(preprocessing_function=preprocess_input,
                                rotation_range=20,
                                shear_range = 0.2,
                                width_shift_range=0.2,
                                height_shift_range=0.1,
                                zoom_range=0.2,
                                horizontal_flip=True,
                                vertical_flip=True,
                                brightness_range=[0.1,1.0]
                                )
In [ ]:
train_generator=train_datagen.flow_from_directory(
        directory="Car Images/Train Images Cropped/",
        batch_size=64,
        seed=random_state,
        target_size=(224,224),
        class_mode = 'categorical',
        shuffle=True)


test_generator=test_datagen.flow_from_directory(
    directory="Car Images/Test Images Cropped/",
    batch_size=64,
    seed=random_state,
    target_size=(224,224),
    class_mode = 'categorical',
    shuffle=True)
Found 8144 images belonging to 196 classes.
Found 8041 images belonging to 196 classes.

Tuning old scratch model for more accuracy with image augmentation.¶

In [ ]:
backend.clear_session()
tf.random.set_seed(random_state)

# Initialising CNN classifier
base_model_2_tuned=Sequential()

base_model_2_tuned.add(Conv2D(filters=96,kernel_size=(3,3),input_shape=(224,224,3),activation='relu'))
base_model_2_tuned.add(BatchNormalization())
base_model_2_tuned.add(MaxPool2D(pool_size=(3,3)))

base_model_2_tuned.add(Conv2D(filters=256,kernel_size=(3,3),activation='relu'))
base_model_2_tuned.add(BatchNormalization())
base_model_2_tuned.add(MaxPool2D(pool_size=(3,3)))

base_model_2_tuned.add(Conv2D(filters=384,kernel_size=(3,3),activation='relu'))
base_model_2_tuned.add(BatchNormalization())
base_model_2_tuned.add(MaxPool2D(pool_size=(2,2)))

base_model_2_tuned.add(Conv2D(filters=384,kernel_size=(3,3),activation='relu'))
base_model_2_tuned.add(BatchNormalization())
base_model_2_tuned.add(MaxPool2D(pool_size=(2,2)))

base_model_2_tuned.add(Conv2D(filters=256,kernel_size=(3,3),activation='relu'))
base_model_2_tuned.add(BatchNormalization())
base_model_2_tuned.add(MaxPool2D(pool_size=(2,2)))

# Flattening the layer before fully connected layers
base_model_2_tuned.add(Flatten())

base_model_2_tuned.add(Dense(units=512,activation='relu'))
base_model_2_tuned.add(Dropout(0.2))

base_model_2_tuned.add(Dense(units=512,activation='relu'))
base_model_2_tuned.add(Dropout(0.3))

# The final output layer with 5 neuron to predict the categorical classification
base_model_2_tuned.add(Dense(units=196,activation='softmax'))

adam = Adam(learning_rate=0.0001, beta_1=0.9, beta_2=0.999, decay=0.001, clipnorm=0.001)
base_model_2_tuned.compile(optimizer = adam, loss = categorical_crossentropy, metrics = ['accuracy'])

base_model_2_tuned.summary()
2023-06-14 16:51:18.817743: I tensorflow/core/platform/cpu_feature_guard.cc:193] This TensorFlow binary is optimized with oneAPI Deep Neural Network Library (oneDNN) to use the following CPU instructions in performance-critical operations:  AVX2 FMA
To enable them in other operations, rebuild TensorFlow with the appropriate compiler flags.
Model: "sequential"
_________________________________________________________________
 Layer (type)                Output Shape              Param #   
=================================================================
 conv2d (Conv2D)             (None, 222, 222, 96)      2688      
                                                                 
 batch_normalization (BatchN  (None, 222, 222, 96)     384       
 ormalization)                                                   
                                                                 
 max_pooling2d (MaxPooling2D  (None, 74, 74, 96)       0         
 )                                                               
                                                                 
 conv2d_1 (Conv2D)           (None, 72, 72, 256)       221440    
                                                                 
 batch_normalization_1 (Batc  (None, 72, 72, 256)      1024      
 hNormalization)                                                 
                                                                 
 max_pooling2d_1 (MaxPooling  (None, 24, 24, 256)      0         
 2D)                                                             
                                                                 
 conv2d_2 (Conv2D)           (None, 22, 22, 384)       885120    
                                                                 
 batch_normalization_2 (Batc  (None, 22, 22, 384)      1536      
 hNormalization)                                                 
                                                                 
 max_pooling2d_2 (MaxPooling  (None, 11, 11, 384)      0         
 2D)                                                             
                                                                 
 conv2d_3 (Conv2D)           (None, 9, 9, 384)         1327488   
                                                                 
 batch_normalization_3 (Batc  (None, 9, 9, 384)        1536      
 hNormalization)                                                 
                                                                 
 max_pooling2d_3 (MaxPooling  (None, 4, 4, 384)        0         
 2D)                                                             
                                                                 
 conv2d_4 (Conv2D)           (None, 2, 2, 256)         884992    
                                                                 
 batch_normalization_4 (Batc  (None, 2, 2, 256)        1024      
 hNormalization)                                                 
                                                                 
 max_pooling2d_4 (MaxPooling  (None, 1, 1, 256)        0         
 2D)                                                             
                                                                 
 flatten (Flatten)           (None, 256)               0         
                                                                 
 dense (Dense)               (None, 512)               131584    
                                                                 
 dropout (Dropout)           (None, 512)               0         
                                                                 
 dense_1 (Dense)             (None, 512)               262656    
                                                                 
 dropout_1 (Dropout)         (None, 512)               0         
                                                                 
 dense_2 (Dense)             (None, 196)               100548    
                                                                 
=================================================================
Total params: 3,822,020
Trainable params: 3,819,268
Non-trainable params: 2,752
_________________________________________________________________
In [ ]:
visualkeras_view(base_model_2_tuned)
Out[ ]:
In [ ]:
# Fit the model
base_model_2_tuned_history = base_model_2_tuned.fit_generator(train_generator, validation_data=test_generator, epochs=50, verbose=1, callbacks=create_callbacks('base_model_2_tuned'), steps_per_epoch=100, validation_steps=50)
Epoch 1/50
100/100 [==============================] - ETA: 0s - loss: 5.4532 - accuracy: 0.0066
Epoch 1: saving model to base_model_2_tuned_weights.h5
100/100 [==============================] - 1048s 11s/step - loss: 5.4532 - accuracy: 0.0066 - val_loss: 5.3047 - val_accuracy: 0.0053 - lr: 1.0000e-04
Epoch 2/50
100/100 [==============================] - ETA: 0s - loss: 5.3140 - accuracy: 0.0113
Epoch 2: saving model to base_model_2_tuned_weights.h5
100/100 [==============================] - 1064s 11s/step - loss: 5.3140 - accuracy: 0.0113 - val_loss: 5.3069 - val_accuracy: 0.0037 - lr: 1.0000e-04
Epoch 3/50
100/100 [==============================] - ETA: 0s - loss: 5.2629 - accuracy: 0.0107
Epoch 3: saving model to base_model_2_tuned_weights.h5
100/100 [==============================] - 1053s 11s/step - loss: 5.2629 - accuracy: 0.0107 - val_loss: 5.3080 - val_accuracy: 0.0053 - lr: 1.0000e-04
Epoch 4/50
100/100 [==============================] - ETA: 0s - loss: 5.2132 - accuracy: 0.0164
Epoch 4: saving model to base_model_2_tuned_weights.h5
100/100 [==============================] - 864s 9s/step - loss: 5.2132 - accuracy: 0.0164 - val_loss: 5.2730 - val_accuracy: 0.0084 - lr: 1.0000e-05
Epoch 5/50
100/100 [==============================] - ETA: 0s - loss: 5.2071 - accuracy: 0.0167
Epoch 5: saving model to base_model_2_tuned_weights.h5
100/100 [==============================] - 914s 9s/step - loss: 5.2071 - accuracy: 0.0167 - val_loss: 5.2246 - val_accuracy: 0.0150 - lr: 1.0000e-05
Epoch 6/50
100/100 [==============================] - ETA: 0s - loss: 5.2032 - accuracy: 0.0170
Epoch 6: saving model to base_model_2_tuned_weights.h5
100/100 [==============================] - 1076s 11s/step - loss: 5.2032 - accuracy: 0.0170 - val_loss: 5.1873 - val_accuracy: 0.0197 - lr: 1.0000e-05
Epoch 7/50
100/100 [==============================] - ETA: 0s - loss: 5.1971 - accuracy: 0.0162
Epoch 7: saving model to base_model_2_tuned_weights.h5
100/100 [==============================] - 1057s 11s/step - loss: 5.1971 - accuracy: 0.0162 - val_loss: 5.1594 - val_accuracy: 0.0197 - lr: 1.0000e-05
Epoch 8/50
100/100 [==============================] - ETA: 0s - loss: 5.1926 - accuracy: 0.0132
Epoch 8: saving model to base_model_2_tuned_weights.h5
100/100 [==============================] - 1052s 11s/step - loss: 5.1926 - accuracy: 0.0132 - val_loss: 5.1554 - val_accuracy: 0.0172 - lr: 1.0000e-05
Epoch 9/50
100/100 [==============================] - ETA: 0s - loss: 5.1852 - accuracy: 0.0159
Epoch 9: saving model to base_model_2_tuned_weights.h5
100/100 [==============================] - 1038s 10s/step - loss: 5.1852 - accuracy: 0.0159 - val_loss: 5.1517 - val_accuracy: 0.0219 - lr: 1.0000e-05
Epoch 10/50
100/100 [==============================] - ETA: 0s - loss: 5.1866 - accuracy: 0.0184
Epoch 10: saving model to base_model_2_tuned_weights.h5
100/100 [==============================] - 1025s 10s/step - loss: 5.1866 - accuracy: 0.0184 - val_loss: 5.1337 - val_accuracy: 0.0213 - lr: 1.0000e-05
Epoch 11/50
100/100 [==============================] - ETA: 0s - loss: 5.1793 - accuracy: 0.0162
Epoch 11: saving model to base_model_2_tuned_weights.h5
100/100 [==============================] - 1044s 10s/step - loss: 5.1793 - accuracy: 0.0162 - val_loss: 5.1343 - val_accuracy: 0.0259 - lr: 1.0000e-05
Epoch 12/50
100/100 [==============================] - ETA: 0s - loss: 5.1734 - accuracy: 0.0189
Epoch 12: saving model to base_model_2_tuned_weights.h5
100/100 [==============================] - 1053s 11s/step - loss: 5.1734 - accuracy: 0.0189 - val_loss: 5.1426 - val_accuracy: 0.0244 - lr: 1.0000e-05
Epoch 13/50
100/100 [==============================] - ETA: 0s - loss: 5.1691 - accuracy: 0.0164
Epoch 13: saving model to base_model_2_tuned_weights.h5
100/100 [==============================] - 1033s 10s/step - loss: 5.1691 - accuracy: 0.0164 - val_loss: 5.1370 - val_accuracy: 0.0228 - lr: 1.0000e-05
Epoch 14/50
100/100 [==============================] - ETA: 0s - loss: 5.1759 - accuracy: 0.0200
Epoch 14: saving model to base_model_2_tuned_weights.h5
100/100 [==============================] - 1057s 11s/step - loss: 5.1759 - accuracy: 0.0200 - val_loss: 5.1335 - val_accuracy: 0.0259 - lr: 1.0000e-05
Epoch 15/50
100/100 [==============================] - ETA: 0s - loss: 5.1625 - accuracy: 0.0184
Epoch 15: saving model to base_model_2_tuned_weights.h5
100/100 [==============================] - 1037s 10s/step - loss: 5.1625 - accuracy: 0.0184 - val_loss: 5.1230 - val_accuracy: 0.0256 - lr: 1.0000e-05
Epoch 16/50
100/100 [==============================] - ETA: 0s - loss: 5.1586 - accuracy: 0.0195
Epoch 16: saving model to base_model_2_tuned_weights.h5
100/100 [==============================] - 1044s 10s/step - loss: 5.1586 - accuracy: 0.0195 - val_loss: 5.1302 - val_accuracy: 0.0222 - lr: 1.0000e-05
Epoch 17/50
100/100 [==============================] - ETA: 0s - loss: 5.1560 - accuracy: 0.0233
Epoch 17: saving model to base_model_2_tuned_weights.h5
100/100 [==============================] - 1051s 11s/step - loss: 5.1560 - accuracy: 0.0233 - val_loss: 5.1297 - val_accuracy: 0.0231 - lr: 1.0000e-05
Epoch 18/50
100/100 [==============================] - ETA: 0s - loss: 5.1607 - accuracy: 0.0168
Epoch 18: saving model to base_model_2_tuned_weights.h5
100/100 [==============================] - 1068s 11s/step - loss: 5.1607 - accuracy: 0.0168 - val_loss: 5.1344 - val_accuracy: 0.0219 - lr: 1.0000e-05
Epoch 19/50
100/100 [==============================] - ETA: 0s - loss: 5.1490 - accuracy: 0.0206
Epoch 19: saving model to base_model_2_tuned_weights.h5
100/100 [==============================] - 1057s 11s/step - loss: 5.1490 - accuracy: 0.0206 - val_loss: 5.1236 - val_accuracy: 0.0266 - lr: 1.0000e-05
Epoch 20/50
100/100 [==============================] - ETA: 0s - loss: 5.1543 - accuracy: 0.0172
Epoch 20: saving model to base_model_2_tuned_weights.h5
100/100 [==============================] - 1084s 11s/step - loss: 5.1543 - accuracy: 0.0172 - val_loss: 5.1115 - val_accuracy: 0.0294 - lr: 1.0000e-05
Epoch 21/50
100/100 [==============================] - ETA: 0s - loss: 5.1593 - accuracy: 0.0192
Epoch 21: saving model to base_model_2_tuned_weights.h5
100/100 [==============================] - 996s 10s/step - loss: 5.1593 - accuracy: 0.0192 - val_loss: 5.1131 - val_accuracy: 0.0300 - lr: 1.0000e-05
Epoch 22/50
100/100 [==============================] - ETA: 0s - loss: 5.1472 - accuracy: 0.0192
Epoch 22: saving model to base_model_2_tuned_weights.h5
100/100 [==============================] - 931s 9s/step - loss: 5.1472 - accuracy: 0.0192 - val_loss: 5.1286 - val_accuracy: 0.0203 - lr: 1.0000e-05
Epoch 23/50
100/100 [==============================] - ETA: 0s - loss: 5.1529 - accuracy: 0.0190
Epoch 23: saving model to base_model_2_tuned_weights.h5
100/100 [==============================] - 935s 9s/step - loss: 5.1529 - accuracy: 0.0190 - val_loss: 5.1122 - val_accuracy: 0.0241 - lr: 1.0000e-05
Epoch 24/50
100/100 [==============================] - ETA: 0s - loss: 5.1545 - accuracy: 0.0190
Epoch 24: saving model to base_model_2_tuned_weights.h5
100/100 [==============================] - 925s 9s/step - loss: 5.1545 - accuracy: 0.0190 - val_loss: 5.1077 - val_accuracy: 0.0253 - lr: 1.0000e-05
Epoch 25/50
100/100 [==============================] - ETA: 0s - loss: 5.1507 - accuracy: 0.0195
Epoch 25: saving model to base_model_2_tuned_weights.h5
100/100 [==============================] - 924s 9s/step - loss: 5.1507 - accuracy: 0.0195 - val_loss: 5.1081 - val_accuracy: 0.0275 - lr: 1.0000e-05
Epoch 26/50
100/100 [==============================] - ETA: 0s - loss: 5.1476 - accuracy: 0.0209
Epoch 26: saving model to base_model_2_tuned_weights.h5
100/100 [==============================] - 925s 9s/step - loss: 5.1476 - accuracy: 0.0209 - val_loss: 5.1072 - val_accuracy: 0.0291 - lr: 1.0000e-05
Epoch 27/50
100/100 [==============================] - ETA: 0s - loss: 5.1447 - accuracy: 0.0209
Epoch 27: saving model to base_model_2_tuned_weights.h5
100/100 [==============================] - 928s 9s/step - loss: 5.1447 - accuracy: 0.0209 - val_loss: 5.1031 - val_accuracy: 0.0291 - lr: 1.0000e-05
Epoch 28/50
100/100 [==============================] - ETA: 0s - loss: 5.1352 - accuracy: 0.0194
Epoch 28: saving model to base_model_2_tuned_weights.h5
100/100 [==============================] - 940s 9s/step - loss: 5.1352 - accuracy: 0.0194 - val_loss: 5.1087 - val_accuracy: 0.0266 - lr: 1.0000e-05
Epoch 29/50
100/100 [==============================] - ETA: 0s - loss: 5.1247 - accuracy: 0.0198
Epoch 29: saving model to base_model_2_tuned_weights.h5
100/100 [==============================] - 928s 9s/step - loss: 5.1247 - accuracy: 0.0198 - val_loss: 5.1092 - val_accuracy: 0.0231 - lr: 1.0000e-05
Epoch 30/50
100/100 [==============================] - ETA: 0s - loss: 5.1208 - accuracy: 0.0223
Epoch 30: saving model to base_model_2_tuned_weights.h5
100/100 [==============================] - 943s 9s/step - loss: 5.1208 - accuracy: 0.0223 - val_loss: 5.0982 - val_accuracy: 0.0256 - lr: 1.0000e-05
Epoch 31/50
100/100 [==============================] - ETA: 0s - loss: 5.1386 - accuracy: 0.0206
Epoch 31: saving model to base_model_2_tuned_weights.h5
100/100 [==============================] - 936s 9s/step - loss: 5.1386 - accuracy: 0.0206 - val_loss: 5.0903 - val_accuracy: 0.0269 - lr: 1.0000e-05
Epoch 32/50
100/100 [==============================] - ETA: 0s - loss: 5.1443 - accuracy: 0.0222
Epoch 32: saving model to base_model_2_tuned_weights.h5
100/100 [==============================] - 959s 10s/step - loss: 5.1443 - accuracy: 0.0222 - val_loss: 5.1085 - val_accuracy: 0.0272 - lr: 1.0000e-05
Epoch 33/50
100/100 [==============================] - ETA: 0s - loss: 5.1403 - accuracy: 0.0213
Epoch 33: saving model to base_model_2_tuned_weights.h5
100/100 [==============================] - 939s 9s/step - loss: 5.1403 - accuracy: 0.0213 - val_loss: 5.0949 - val_accuracy: 0.0244 - lr: 1.0000e-05
Epoch 34/50
100/100 [==============================] - ETA: 0s - loss: 5.1245 - accuracy: 0.0213
Epoch 34: saving model to base_model_2_tuned_weights.h5
100/100 [==============================] - 927s 9s/step - loss: 5.1245 - accuracy: 0.0213 - val_loss: 5.0971 - val_accuracy: 0.0253 - lr: 1.0000e-05
Epoch 35/50
100/100 [==============================] - ETA: 0s - loss: 5.1405 - accuracy: 0.0214
Epoch 35: saving model to base_model_2_tuned_weights.h5
100/100 [==============================] - 915s 9s/step - loss: 5.1405 - accuracy: 0.0214 - val_loss: 5.1112 - val_accuracy: 0.0237 - lr: 1.0000e-05
Epoch 36/50
100/100 [==============================] - ETA: 0s - loss: 5.1217 - accuracy: 0.0211
Epoch 36: saving model to base_model_2_tuned_weights.h5
100/100 [==============================] - 934s 9s/step - loss: 5.1217 - accuracy: 0.0211 - val_loss: 5.0993 - val_accuracy: 0.0259 - lr: 1.0000e-05
Epoch 37/50
100/100 [==============================] - ETA: 0s - loss: 5.1167 - accuracy: 0.0225
Epoch 37: saving model to base_model_2_tuned_weights.h5
100/100 [==============================] - 913s 9s/step - loss: 5.1167 - accuracy: 0.0225 - val_loss: 5.1020 - val_accuracy: 0.0259 - lr: 1.0000e-05
Epoch 38/50
100/100 [==============================] - ETA: 0s - loss: 5.1355 - accuracy: 0.0202
Epoch 38: saving model to base_model_2_tuned_weights.h5
100/100 [==============================] - 914s 9s/step - loss: 5.1355 - accuracy: 0.0202 - val_loss: 5.0812 - val_accuracy: 0.0278 - lr: 1.0000e-05
Epoch 39/50
100/100 [==============================] - ETA: 0s - loss: 5.1274 - accuracy: 0.0200
Epoch 39: saving model to base_model_2_tuned_weights.h5
100/100 [==============================] - 921s 9s/step - loss: 5.1274 - accuracy: 0.0200 - val_loss: 5.1037 - val_accuracy: 0.0256 - lr: 1.0000e-05
Epoch 40/50
100/100 [==============================] - ETA: 0s - loss: 5.1317 - accuracy: 0.0194
Epoch 40: saving model to base_model_2_tuned_weights.h5
100/100 [==============================] - 922s 9s/step - loss: 5.1317 - accuracy: 0.0194 - val_loss: 5.0922 - val_accuracy: 0.0294 - lr: 1.0000e-05
Epoch 41/50
100/100 [==============================] - ETA: 0s - loss: 5.1196 - accuracy: 0.0227
Epoch 41: saving model to base_model_2_tuned_weights.h5
100/100 [==============================] - 909s 9s/step - loss: 5.1196 - accuracy: 0.0227 - val_loss: 5.1021 - val_accuracy: 0.0262 - lr: 1.0000e-05
Epoch 42/50
100/100 [==============================] - ETA: 0s - loss: 5.1291 - accuracy: 0.0209
Epoch 42: saving model to base_model_2_tuned_weights.h5
100/100 [==============================] - 909s 9s/step - loss: 5.1291 - accuracy: 0.0209 - val_loss: 5.0938 - val_accuracy: 0.0284 - lr: 1.0000e-05
Epoch 43/50
100/100 [==============================] - ETA: 0s - loss: 5.1275 - accuracy: 0.0176
Epoch 43: saving model to base_model_2_tuned_weights.h5
100/100 [==============================] - 937s 9s/step - loss: 5.1275 - accuracy: 0.0176 - val_loss: 5.0898 - val_accuracy: 0.0253 - lr: 1.0000e-05
Epoch 44/50
100/100 [==============================] - ETA: 0s - loss: 5.1285 - accuracy: 0.0211
Epoch 44: saving model to base_model_2_tuned_weights.h5
100/100 [==============================] - 919s 9s/step - loss: 5.1285 - accuracy: 0.0211 - val_loss: 5.0871 - val_accuracy: 0.0297 - lr: 1.0000e-05
Epoch 45/50
100/100 [==============================] - ETA: 0s - loss: 5.1210 - accuracy: 0.0222
Epoch 45: saving model to base_model_2_tuned_weights.h5
100/100 [==============================] - 908s 9s/step - loss: 5.1210 - accuracy: 0.0222 - val_loss: 5.0805 - val_accuracy: 0.0253 - lr: 1.0000e-05
Epoch 46/50
100/100 [==============================] - ETA: 0s - loss: 5.1183 - accuracy: 0.0208
Epoch 46: saving model to base_model_2_tuned_weights.h5
100/100 [==============================] - 913s 9s/step - loss: 5.1183 - accuracy: 0.0208 - val_loss: 5.0832 - val_accuracy: 0.0284 - lr: 1.0000e-05
Epoch 47/50
100/100 [==============================] - ETA: 0s - loss: 5.1148 - accuracy: 0.0239
Epoch 47: saving model to base_model_2_tuned_weights.h5
100/100 [==============================] - 913s 9s/step - loss: 5.1148 - accuracy: 0.0239 - val_loss: 5.0824 - val_accuracy: 0.0272 - lr: 1.0000e-05
Epoch 48/50
100/100 [==============================] - ETA: 0s - loss: 5.1110 - accuracy: 0.0246
Epoch 48: saving model to base_model_2_tuned_weights.h5
100/100 [==============================] - 886s 9s/step - loss: 5.1110 - accuracy: 0.0246 - val_loss: 5.0825 - val_accuracy: 0.0269 - lr: 1.0000e-05
Epoch 49/50
100/100 [==============================] - ETA: 0s - loss: 5.1112 - accuracy: 0.0233
Epoch 49: saving model to base_model_2_tuned_weights.h5
100/100 [==============================] - 910s 9s/step - loss: 5.1112 - accuracy: 0.0233 - val_loss: 5.0865 - val_accuracy: 0.0284 - lr: 1.0000e-05
Epoch 50/50
100/100 [==============================] - ETA: 0s - loss: 5.1243 - accuracy: 0.0244
Epoch 50: saving model to base_model_2_tuned_weights.h5
100/100 [==============================] - 876s 9s/step - loss: 5.1243 - accuracy: 0.0244 - val_loss: 5.0996 - val_accuracy: 0.0234 - lr: 1.0000e-05
In [ ]:
# create an iterator object with write permission - base_model_2.pkl
with open('base_model_2_tuned.pkl', 'wb') as files:
    pickle.dump(base_model_2_tuned, files)
Keras weights file (<HDF5 file "variables.h5" (mode r+)>) saving:
...layers
......batch_normalization
.........vars
............0
............1
............2
............3
......batch_normalization_1
.........vars
............0
............1
............2
............3
......batch_normalization_2
.........vars
............0
............1
............2
............3
......batch_normalization_3
.........vars
............0
............1
............2
............3
......batch_normalization_4
.........vars
............0
............1
............2
............3
......conv2d
.........vars
............0
............1
......conv2d_1
.........vars
............0
............1
......conv2d_2
.........vars
............0
............1
......conv2d_3
.........vars
............0
............1
......conv2d_4
.........vars
............0
............1
......dense
.........vars
............0
............1
......dense_1
.........vars
............0
............1
......dense_2
.........vars
............0
............1
......dropout
.........vars
......dropout_1
.........vars
......flatten
.........vars
......max_pooling2d
.........vars
......max_pooling2d_1
.........vars
......max_pooling2d_2
.........vars
......max_pooling2d_3
.........vars
......max_pooling2d_4
.........vars
...metrics
......mean
.........vars
............0
............1
......mean_metric_wrapper
.........vars
............0
............1
...vars
Keras model archive saving:
File Name                                             Modified             Size
config.json                                    2023-06-15 06:18:58         9273
metadata.json                                  2023-06-15 06:18:58           64
variables.h5                                   2023-06-15 06:18:58     15351512
In [ ]:
metric_score(base_model_2_tuned_history,base_model_2_tuned,50,'base_model_2_tuned',model_list, is_image_augmented=True, train_set=train_generator, test_set=test_generator)
128/128 [==============================] - 212s 2s/step - loss: 5.0460 - accuracy: 0.0323
126/126 [==============================] - 217s 2s/step - loss: 5.0862 - accuracy: 0.0298
126/126 [==============================] - 225s 2s/step

Classification Matrix:
               precision    recall  f1-score   support

           0       0.00      0.00      0.00        44
           1       0.00      0.00      0.00        44
           2       0.00      0.00      0.00        32
           3       0.00      0.00      0.00        43
           4       0.00      0.00      0.00        42
           5       0.00      0.00      0.00        40
           6       0.00      0.00      0.00        39
           7       0.00      0.00      0.00        45
           8       0.00      0.00      0.00        41
           9       0.00      0.00      0.00        33
          10       0.00      0.00      0.00        38
          11       0.00      0.00      0.00        40
          12       0.00      0.00      0.00        42
          13       0.00      0.00      0.00        41
          14       0.00      0.00      0.00        43
          15       0.00      0.00      0.00        36
          16       0.00      0.00      0.00        45
          17       0.00      0.00      0.00        39
          18       0.02      0.02      0.02        42
          19       0.00      0.00      0.00        42
          20       0.01      0.04      0.01        46
          21       0.00      0.00      0.00        40
          22       0.00      0.00      0.00        39
          23       0.00      0.00      0.00        42
          24       0.00      0.00      0.00        43
          25       0.00      0.00      0.00        35
          26       0.00      0.00      0.00        41
          27       0.00      0.00      0.00        42
          28       0.00      0.00      0.00        41
          29       0.00      0.00      0.00        44
          30       0.00      0.00      0.00        34
          31       0.00      0.00      0.00        44
          32       0.10      0.02      0.04        41
          33       0.00      0.00      0.00        41
          34       0.00      0.00      0.00        38
          35       0.00      0.00      0.00        41
          36       0.00      0.00      0.00        42
          37       0.00      0.00      0.00        40
          38       0.00      0.05      0.01        39
          39       0.00      0.00      0.00        44
          40       0.00      0.00      0.00        46
          41       0.50      0.03      0.06        34
          42       0.00      0.00      0.00        36
          43       0.00      0.00      0.00        35
          44       0.00      0.00      0.00        32
          45       0.00      0.00      0.00        43
          46       0.00      0.00      0.00        42
          47       0.00      0.00      0.00        42
          48       0.00      0.00      0.00        35
          49       0.00      0.00      0.00        37
          50       0.33      0.02      0.04        43
          51       0.00      0.00      0.00        44
          52       0.00      0.00      0.00        41
          53       0.12      0.02      0.04        45
          54       0.00      0.00      0.00        44
          55       0.00      0.00      0.00        41
          56       0.00      0.00      0.00        39
          57       0.00      0.00      0.00        37
          58       0.00      0.00      0.00        46
          59       0.02      0.03      0.02        29
          60       0.00      0.00      0.00        35
          61       0.00      0.00      0.00        36
          62       0.00      0.00      0.00        43
          63       0.00      0.00      0.00        38
          64       0.01      0.02      0.02        44
          65       0.00      0.00      0.00        45
          66       0.00      0.00      0.00        42
          67       0.00      0.00      0.00        43
          68       0.00      0.00      0.00        40
          69       0.00      0.00      0.00        44
          70       0.00      0.00      0.00        38
          71       0.02      0.02      0.02        44
          72       0.00      0.00      0.00        37
          73       0.00      0.00      0.00        40
          74       0.00      0.00      0.00        44
          75       0.00      0.00      0.00        48
          76       0.00      0.02      0.00        43
          77       0.00      0.00      0.00        43
          78       0.00      0.00      0.00        45
          79       0.00      0.00      0.00        40
          80       0.00      0.00      0.00        37
          81       0.00      0.00      0.00        45
          82       0.00      0.00      0.00        42
          83       0.00      0.00      0.00        40
          84       0.08      0.02      0.04        43
          85       0.00      0.00      0.00        39
          86       0.00      0.00      0.00        42
          87       0.00      0.00      0.00        41
          88       0.00      0.00      0.00        38
          89       0.00      0.00      0.00        41
          90       0.00      0.00      0.00        45
          91       0.00      0.00      0.00        43
          92       0.00      0.00      0.00        44
          93       0.00      0.00      0.00        40
          94       0.00      0.00      0.00        42
          95       0.00      0.00      0.00        44
          96       0.00      0.00      0.00        39
          97       0.00      0.00      0.00        46
          98       0.00      0.00      0.00        27
          99       0.00      0.00      0.00        33
         100       0.01      0.05      0.01        39
         101       0.00      0.00      0.00        42
         102       0.00      0.03      0.01        39
         103       0.00      0.00      0.00        42
         104       0.00      0.00      0.00        43
         105       0.00      0.00      0.00        37
         106       0.00      0.00      0.00        43
         107       0.00      0.00      0.00        44
         108       0.00      0.00      0.00        45
         109       0.00      0.00      0.00        42
         110       0.00      0.00      0.00        41
         111       0.00      0.00      0.00        42
         112       0.00      0.00      0.00        45
         113       0.00      0.00      0.00        44
         114       0.00      0.00      0.00        45
         115       0.00      0.00      0.00        44
         116       0.00      0.00      0.00        42
         117       0.00      0.00      0.00        44
         118       0.00      0.00      0.00        40
         119       0.01      0.19      0.02        68
         120       0.00      0.00      0.00        41
         121       0.00      0.00      0.00        42
         122       0.00      0.00      0.00        44
         123       0.00      0.00      0.00        43
         124       0.00      0.00      0.00        39
         125       0.00      0.00      0.00        39
         126       0.00      0.00      0.00        38
         127       0.02      0.05      0.03        41
         128       0.00      0.00      0.00        42
         129       0.00      0.00      0.00        24
         130       0.00      0.00      0.00        42
         131       0.00      0.00      0.00        42
         132       0.00      0.00      0.00        42
         133       0.00      0.02      0.01        43
         134       0.00      0.00      0.00        42
         135       0.00      0.00      0.00        33
         136       0.00      0.00      0.00        39
         137       0.00      0.00      0.00        43
         138       0.00      0.00      0.00        41
         139       0.00      0.00      0.00        42
         140       0.00      0.00      0.00        34
         141       0.00      0.00      0.00        32
         142       0.00      0.00      0.00        40
         143       0.00      0.00      0.00        46
         144       0.00      0.00      0.00        42
         145       0.00      0.00      0.00        45
         146       0.00      0.00      0.00        44
         147       0.00      0.00      0.00        44
         148       0.00      0.00      0.00        43
         149       0.00      0.00      0.00        43
         150       0.00      0.00      0.00        44
         151       0.01      0.03      0.01        35
         152       0.00      0.00      0.00        36
         153       0.00      0.00      0.00        42
         154       0.00      0.00      0.00        42
         155       0.00      0.05      0.00        39
         156       0.00      0.00      0.00        36
         157       0.00      0.00      0.00        29
         158       0.00      0.00      0.00        36
         159       0.01      0.02      0.01        44
         160       0.00      0.00      0.00        48
         161       0.02      0.02      0.02        45
         162       0.00      0.00      0.00        43
         163       0.00      0.00      0.00        44
         164       0.02      0.03      0.03        36
         165       0.00      0.00      0.00        41
         166       0.00      0.00      0.00        47
         167       0.00      0.00      0.00        46
         168       0.00      0.00      0.00        44
         169       0.01      0.07      0.02        42
         170       0.00      0.00      0.00        38
         171       0.00      0.02      0.01        44
         172       0.01      0.02      0.01        43
         173       0.00      0.00      0.00        41
         174       0.00      0.00      0.00        38
         175       0.00      0.00      0.00        30
         176       0.00      0.00      0.00        44
         177       0.00      0.00      0.00        41
         178       0.00      0.00      0.00        45
         179       0.00      0.00      0.00        42
         180       0.08      0.03      0.04        38
         181       0.00      0.00      0.00        46
         182       0.00      0.00      0.00        42
         183       0.00      0.00      0.00        40
         184       0.00      0.00      0.00        38
         185       0.00      0.00      0.00        40
         186       0.00      0.00      0.00        43
         187       0.00      0.00      0.00        43
         188       0.00      0.00      0.00        38
         189       0.00      0.00      0.00        42
         190       0.00      0.00      0.00        46
         191       0.00      0.00      0.00        43
         192       0.00      0.00      0.00        45
         193       0.00      0.00      0.00        41
         194       0.00      0.00      0.00        43
         195       0.00      0.00      0.00        40

    accuracy                           0.01      8041
   macro avg       0.01      0.01      0.00      8041
weighted avg       0.01      0.01      0.00      8041

Initializing MobileNet CNN with Architecture using transfer learning¶

In [ ]:
backend.clear_session()
tf.random.set_seed(random_state)

base_mobile_net_model = MobileNetV2(input_shape=(224,224,3), include_top=False, weights='imagenet')

x = base_mobile_net_model.output
x = tf.keras.layers.GlobalAveragePooling2D()(x)
x = Dropout(.1)(x)
prediction_layer = tf.keras.layers.Dense(196, activation='softmax')(x)

mobile_net_model=Model(inputs=base_mobile_net_model.input,outputs=prediction_layer)

for layer in mobile_net_model.layers[:70]:
    layer.trainable=False
for layer in mobile_net_model.layers[70:]:
    layer.trainable=True

adam = Adam(learning_rate=0.0001, beta_1=0.9, beta_2=0.999, decay=0.001, clipnorm=0.001)

mobile_net_model.compile(optimizer=adam, loss= CategoricalCrossentropy(from_logits=True), metrics=['accuracy'])

## Looking into our base model
mobile_net_model.summary()
Model: "model"
__________________________________________________________________________________________________
 Layer (type)                   Output Shape         Param #     Connected to                     
==================================================================================================
 input_1 (InputLayer)           [(None, 224, 224, 3  0           []                               
                                )]                                                                
                                                                                                  
 Conv1 (Conv2D)                 (None, 112, 112, 32  864         ['input_1[0][0]']                
                                )                                                                 
                                                                                                  
 bn_Conv1 (BatchNormalization)  (None, 112, 112, 32  128         ['Conv1[0][0]']                  
                                )                                                                 
                                                                                                  
 Conv1_relu (ReLU)              (None, 112, 112, 32  0           ['bn_Conv1[0][0]']               
                                )                                                                 
                                                                                                  
 expanded_conv_depthwise (Depth  (None, 112, 112, 32  288        ['Conv1_relu[0][0]']             
 wiseConv2D)                    )                                                                 
                                                                                                  
 expanded_conv_depthwise_BN (Ba  (None, 112, 112, 32  128        ['expanded_conv_depthwise[0][0]']
 tchNormalization)              )                                                                 
                                                                                                  
 expanded_conv_depthwise_relu (  (None, 112, 112, 32  0          ['expanded_conv_depthwise_BN[0][0
 ReLU)                          )                                ]']                              
                                                                                                  
 expanded_conv_project (Conv2D)  (None, 112, 112, 16  512        ['expanded_conv_depthwise_relu[0]
                                )                                [0]']                            
                                                                                                  
 expanded_conv_project_BN (Batc  (None, 112, 112, 16  64         ['expanded_conv_project[0][0]']  
 hNormalization)                )                                                                 
                                                                                                  
 block_1_expand (Conv2D)        (None, 112, 112, 96  1536        ['expanded_conv_project_BN[0][0]'
                                )                                ]                                
                                                                                                  
 block_1_expand_BN (BatchNormal  (None, 112, 112, 96  384        ['block_1_expand[0][0]']         
 ization)                       )                                                                 
                                                                                                  
 block_1_expand_relu (ReLU)     (None, 112, 112, 96  0           ['block_1_expand_BN[0][0]']      
                                )                                                                 
                                                                                                  
 block_1_pad (ZeroPadding2D)    (None, 113, 113, 96  0           ['block_1_expand_relu[0][0]']    
                                )                                                                 
                                                                                                  
 block_1_depthwise (DepthwiseCo  (None, 56, 56, 96)  864         ['block_1_pad[0][0]']            
 nv2D)                                                                                            
                                                                                                  
 block_1_depthwise_BN (BatchNor  (None, 56, 56, 96)  384         ['block_1_depthwise[0][0]']      
 malization)                                                                                      
                                                                                                  
 block_1_depthwise_relu (ReLU)  (None, 56, 56, 96)   0           ['block_1_depthwise_BN[0][0]']   
                                                                                                  
 block_1_project (Conv2D)       (None, 56, 56, 24)   2304        ['block_1_depthwise_relu[0][0]'] 
                                                                                                  
 block_1_project_BN (BatchNorma  (None, 56, 56, 24)  96          ['block_1_project[0][0]']        
 lization)                                                                                        
                                                                                                  
 block_2_expand (Conv2D)        (None, 56, 56, 144)  3456        ['block_1_project_BN[0][0]']     
                                                                                                  
 block_2_expand_BN (BatchNormal  (None, 56, 56, 144)  576        ['block_2_expand[0][0]']         
 ization)                                                                                         
                                                                                                  
 block_2_expand_relu (ReLU)     (None, 56, 56, 144)  0           ['block_2_expand_BN[0][0]']      
                                                                                                  
 block_2_depthwise (DepthwiseCo  (None, 56, 56, 144)  1296       ['block_2_expand_relu[0][0]']    
 nv2D)                                                                                            
                                                                                                  
 block_2_depthwise_BN (BatchNor  (None, 56, 56, 144)  576        ['block_2_depthwise[0][0]']      
 malization)                                                                                      
                                                                                                  
 block_2_depthwise_relu (ReLU)  (None, 56, 56, 144)  0           ['block_2_depthwise_BN[0][0]']   
                                                                                                  
 block_2_project (Conv2D)       (None, 56, 56, 24)   3456        ['block_2_depthwise_relu[0][0]'] 
                                                                                                  
 block_2_project_BN (BatchNorma  (None, 56, 56, 24)  96          ['block_2_project[0][0]']        
 lization)                                                                                        
                                                                                                  
 block_2_add (Add)              (None, 56, 56, 24)   0           ['block_1_project_BN[0][0]',     
                                                                  'block_2_project_BN[0][0]']     
                                                                                                  
 block_3_expand (Conv2D)        (None, 56, 56, 144)  3456        ['block_2_add[0][0]']            
                                                                                                  
 block_3_expand_BN (BatchNormal  (None, 56, 56, 144)  576        ['block_3_expand[0][0]']         
 ization)                                                                                         
                                                                                                  
 block_3_expand_relu (ReLU)     (None, 56, 56, 144)  0           ['block_3_expand_BN[0][0]']      
                                                                                                  
 block_3_pad (ZeroPadding2D)    (None, 57, 57, 144)  0           ['block_3_expand_relu[0][0]']    
                                                                                                  
 block_3_depthwise (DepthwiseCo  (None, 28, 28, 144)  1296       ['block_3_pad[0][0]']            
 nv2D)                                                                                            
                                                                                                  
 block_3_depthwise_BN (BatchNor  (None, 28, 28, 144)  576        ['block_3_depthwise[0][0]']      
 malization)                                                                                      
                                                                                                  
 block_3_depthwise_relu (ReLU)  (None, 28, 28, 144)  0           ['block_3_depthwise_BN[0][0]']   
                                                                                                  
 block_3_project (Conv2D)       (None, 28, 28, 32)   4608        ['block_3_depthwise_relu[0][0]'] 
                                                                                                  
 block_3_project_BN (BatchNorma  (None, 28, 28, 32)  128         ['block_3_project[0][0]']        
 lization)                                                                                        
                                                                                                  
 block_4_expand (Conv2D)        (None, 28, 28, 192)  6144        ['block_3_project_BN[0][0]']     
                                                                                                  
 block_4_expand_BN (BatchNormal  (None, 28, 28, 192)  768        ['block_4_expand[0][0]']         
 ization)                                                                                         
                                                                                                  
 block_4_expand_relu (ReLU)     (None, 28, 28, 192)  0           ['block_4_expand_BN[0][0]']      
                                                                                                  
 block_4_depthwise (DepthwiseCo  (None, 28, 28, 192)  1728       ['block_4_expand_relu[0][0]']    
 nv2D)                                                                                            
                                                                                                  
 block_4_depthwise_BN (BatchNor  (None, 28, 28, 192)  768        ['block_4_depthwise[0][0]']      
 malization)                                                                                      
                                                                                                  
 block_4_depthwise_relu (ReLU)  (None, 28, 28, 192)  0           ['block_4_depthwise_BN[0][0]']   
                                                                                                  
 block_4_project (Conv2D)       (None, 28, 28, 32)   6144        ['block_4_depthwise_relu[0][0]'] 
                                                                                                  
 block_4_project_BN (BatchNorma  (None, 28, 28, 32)  128         ['block_4_project[0][0]']        
 lization)                                                                                        
                                                                                                  
 block_4_add (Add)              (None, 28, 28, 32)   0           ['block_3_project_BN[0][0]',     
                                                                  'block_4_project_BN[0][0]']     
                                                                                                  
 block_5_expand (Conv2D)        (None, 28, 28, 192)  6144        ['block_4_add[0][0]']            
                                                                                                  
 block_5_expand_BN (BatchNormal  (None, 28, 28, 192)  768        ['block_5_expand[0][0]']         
 ization)                                                                                         
                                                                                                  
 block_5_expand_relu (ReLU)     (None, 28, 28, 192)  0           ['block_5_expand_BN[0][0]']      
                                                                                                  
 block_5_depthwise (DepthwiseCo  (None, 28, 28, 192)  1728       ['block_5_expand_relu[0][0]']    
 nv2D)                                                                                            
                                                                                                  
 block_5_depthwise_BN (BatchNor  (None, 28, 28, 192)  768        ['block_5_depthwise[0][0]']      
 malization)                                                                                      
                                                                                                  
 block_5_depthwise_relu (ReLU)  (None, 28, 28, 192)  0           ['block_5_depthwise_BN[0][0]']   
                                                                                                  
 block_5_project (Conv2D)       (None, 28, 28, 32)   6144        ['block_5_depthwise_relu[0][0]'] 
                                                                                                  
 block_5_project_BN (BatchNorma  (None, 28, 28, 32)  128         ['block_5_project[0][0]']        
 lization)                                                                                        
                                                                                                  
 block_5_add (Add)              (None, 28, 28, 32)   0           ['block_4_add[0][0]',            
                                                                  'block_5_project_BN[0][0]']     
                                                                                                  
 block_6_expand (Conv2D)        (None, 28, 28, 192)  6144        ['block_5_add[0][0]']            
                                                                                                  
 block_6_expand_BN (BatchNormal  (None, 28, 28, 192)  768        ['block_6_expand[0][0]']         
 ization)                                                                                         
                                                                                                  
 block_6_expand_relu (ReLU)     (None, 28, 28, 192)  0           ['block_6_expand_BN[0][0]']      
                                                                                                  
 block_6_pad (ZeroPadding2D)    (None, 29, 29, 192)  0           ['block_6_expand_relu[0][0]']    
                                                                                                  
 block_6_depthwise (DepthwiseCo  (None, 14, 14, 192)  1728       ['block_6_pad[0][0]']            
 nv2D)                                                                                            
                                                                                                  
 block_6_depthwise_BN (BatchNor  (None, 14, 14, 192)  768        ['block_6_depthwise[0][0]']      
 malization)                                                                                      
                                                                                                  
 block_6_depthwise_relu (ReLU)  (None, 14, 14, 192)  0           ['block_6_depthwise_BN[0][0]']   
                                                                                                  
 block_6_project (Conv2D)       (None, 14, 14, 64)   12288       ['block_6_depthwise_relu[0][0]'] 
                                                                                                  
 block_6_project_BN (BatchNorma  (None, 14, 14, 64)  256         ['block_6_project[0][0]']        
 lization)                                                                                        
                                                                                                  
 block_7_expand (Conv2D)        (None, 14, 14, 384)  24576       ['block_6_project_BN[0][0]']     
                                                                                                  
 block_7_expand_BN (BatchNormal  (None, 14, 14, 384)  1536       ['block_7_expand[0][0]']         
 ization)                                                                                         
                                                                                                  
 block_7_expand_relu (ReLU)     (None, 14, 14, 384)  0           ['block_7_expand_BN[0][0]']      
                                                                                                  
 block_7_depthwise (DepthwiseCo  (None, 14, 14, 384)  3456       ['block_7_expand_relu[0][0]']    
 nv2D)                                                                                            
                                                                                                  
 block_7_depthwise_BN (BatchNor  (None, 14, 14, 384)  1536       ['block_7_depthwise[0][0]']      
 malization)                                                                                      
                                                                                                  
 block_7_depthwise_relu (ReLU)  (None, 14, 14, 384)  0           ['block_7_depthwise_BN[0][0]']   
                                                                                                  
 block_7_project (Conv2D)       (None, 14, 14, 64)   24576       ['block_7_depthwise_relu[0][0]'] 
                                                                                                  
 block_7_project_BN (BatchNorma  (None, 14, 14, 64)  256         ['block_7_project[0][0]']        
 lization)                                                                                        
                                                                                                  
 block_7_add (Add)              (None, 14, 14, 64)   0           ['block_6_project_BN[0][0]',     
                                                                  'block_7_project_BN[0][0]']     
                                                                                                  
 block_8_expand (Conv2D)        (None, 14, 14, 384)  24576       ['block_7_add[0][0]']            
                                                                                                  
 block_8_expand_BN (BatchNormal  (None, 14, 14, 384)  1536       ['block_8_expand[0][0]']         
 ization)                                                                                         
                                                                                                  
 block_8_expand_relu (ReLU)     (None, 14, 14, 384)  0           ['block_8_expand_BN[0][0]']      
                                                                                                  
 block_8_depthwise (DepthwiseCo  (None, 14, 14, 384)  3456       ['block_8_expand_relu[0][0]']    
 nv2D)                                                                                            
                                                                                                  
 block_8_depthwise_BN (BatchNor  (None, 14, 14, 384)  1536       ['block_8_depthwise[0][0]']      
 malization)                                                                                      
                                                                                                  
 block_8_depthwise_relu (ReLU)  (None, 14, 14, 384)  0           ['block_8_depthwise_BN[0][0]']   
                                                                                                  
 block_8_project (Conv2D)       (None, 14, 14, 64)   24576       ['block_8_depthwise_relu[0][0]'] 
                                                                                                  
 block_8_project_BN (BatchNorma  (None, 14, 14, 64)  256         ['block_8_project[0][0]']        
 lization)                                                                                        
                                                                                                  
 block_8_add (Add)              (None, 14, 14, 64)   0           ['block_7_add[0][0]',            
                                                                  'block_8_project_BN[0][0]']     
                                                                                                  
 block_9_expand (Conv2D)        (None, 14, 14, 384)  24576       ['block_8_add[0][0]']            
                                                                                                  
 block_9_expand_BN (BatchNormal  (None, 14, 14, 384)  1536       ['block_9_expand[0][0]']         
 ization)                                                                                         
                                                                                                  
 block_9_expand_relu (ReLU)     (None, 14, 14, 384)  0           ['block_9_expand_BN[0][0]']      
                                                                                                  
 block_9_depthwise (DepthwiseCo  (None, 14, 14, 384)  3456       ['block_9_expand_relu[0][0]']    
 nv2D)                                                                                            
                                                                                                  
 block_9_depthwise_BN (BatchNor  (None, 14, 14, 384)  1536       ['block_9_depthwise[0][0]']      
 malization)                                                                                      
                                                                                                  
 block_9_depthwise_relu (ReLU)  (None, 14, 14, 384)  0           ['block_9_depthwise_BN[0][0]']   
                                                                                                  
 block_9_project (Conv2D)       (None, 14, 14, 64)   24576       ['block_9_depthwise_relu[0][0]'] 
                                                                                                  
 block_9_project_BN (BatchNorma  (None, 14, 14, 64)  256         ['block_9_project[0][0]']        
 lization)                                                                                        
                                                                                                  
 block_9_add (Add)              (None, 14, 14, 64)   0           ['block_8_add[0][0]',            
                                                                  'block_9_project_BN[0][0]']     
                                                                                                  
 block_10_expand (Conv2D)       (None, 14, 14, 384)  24576       ['block_9_add[0][0]']            
                                                                                                  
 block_10_expand_BN (BatchNorma  (None, 14, 14, 384)  1536       ['block_10_expand[0][0]']        
 lization)                                                                                        
                                                                                                  
 block_10_expand_relu (ReLU)    (None, 14, 14, 384)  0           ['block_10_expand_BN[0][0]']     
                                                                                                  
 block_10_depthwise (DepthwiseC  (None, 14, 14, 384)  3456       ['block_10_expand_relu[0][0]']   
 onv2D)                                                                                           
                                                                                                  
 block_10_depthwise_BN (BatchNo  (None, 14, 14, 384)  1536       ['block_10_depthwise[0][0]']     
 rmalization)                                                                                     
                                                                                                  
 block_10_depthwise_relu (ReLU)  (None, 14, 14, 384)  0          ['block_10_depthwise_BN[0][0]']  
                                                                                                  
 block_10_project (Conv2D)      (None, 14, 14, 96)   36864       ['block_10_depthwise_relu[0][0]']
                                                                                                  
 block_10_project_BN (BatchNorm  (None, 14, 14, 96)  384         ['block_10_project[0][0]']       
 alization)                                                                                       
                                                                                                  
 block_11_expand (Conv2D)       (None, 14, 14, 576)  55296       ['block_10_project_BN[0][0]']    
                                                                                                  
 block_11_expand_BN (BatchNorma  (None, 14, 14, 576)  2304       ['block_11_expand[0][0]']        
 lization)                                                                                        
                                                                                                  
 block_11_expand_relu (ReLU)    (None, 14, 14, 576)  0           ['block_11_expand_BN[0][0]']     
                                                                                                  
 block_11_depthwise (DepthwiseC  (None, 14, 14, 576)  5184       ['block_11_expand_relu[0][0]']   
 onv2D)                                                                                           
                                                                                                  
 block_11_depthwise_BN (BatchNo  (None, 14, 14, 576)  2304       ['block_11_depthwise[0][0]']     
 rmalization)                                                                                     
                                                                                                  
 block_11_depthwise_relu (ReLU)  (None, 14, 14, 576)  0          ['block_11_depthwise_BN[0][0]']  
                                                                                                  
 block_11_project (Conv2D)      (None, 14, 14, 96)   55296       ['block_11_depthwise_relu[0][0]']
                                                                                                  
 block_11_project_BN (BatchNorm  (None, 14, 14, 96)  384         ['block_11_project[0][0]']       
 alization)                                                                                       
                                                                                                  
 block_11_add (Add)             (None, 14, 14, 96)   0           ['block_10_project_BN[0][0]',    
                                                                  'block_11_project_BN[0][0]']    
                                                                                                  
 block_12_expand (Conv2D)       (None, 14, 14, 576)  55296       ['block_11_add[0][0]']           
                                                                                                  
 block_12_expand_BN (BatchNorma  (None, 14, 14, 576)  2304       ['block_12_expand[0][0]']        
 lization)                                                                                        
                                                                                                  
 block_12_expand_relu (ReLU)    (None, 14, 14, 576)  0           ['block_12_expand_BN[0][0]']     
                                                                                                  
 block_12_depthwise (DepthwiseC  (None, 14, 14, 576)  5184       ['block_12_expand_relu[0][0]']   
 onv2D)                                                                                           
                                                                                                  
 block_12_depthwise_BN (BatchNo  (None, 14, 14, 576)  2304       ['block_12_depthwise[0][0]']     
 rmalization)                                                                                     
                                                                                                  
 block_12_depthwise_relu (ReLU)  (None, 14, 14, 576)  0          ['block_12_depthwise_BN[0][0]']  
                                                                                                  
 block_12_project (Conv2D)      (None, 14, 14, 96)   55296       ['block_12_depthwise_relu[0][0]']
                                                                                                  
 block_12_project_BN (BatchNorm  (None, 14, 14, 96)  384         ['block_12_project[0][0]']       
 alization)                                                                                       
                                                                                                  
 block_12_add (Add)             (None, 14, 14, 96)   0           ['block_11_add[0][0]',           
                                                                  'block_12_project_BN[0][0]']    
                                                                                                  
 block_13_expand (Conv2D)       (None, 14, 14, 576)  55296       ['block_12_add[0][0]']           
                                                                                                  
 block_13_expand_BN (BatchNorma  (None, 14, 14, 576)  2304       ['block_13_expand[0][0]']        
 lization)                                                                                        
                                                                                                  
 block_13_expand_relu (ReLU)    (None, 14, 14, 576)  0           ['block_13_expand_BN[0][0]']     
                                                                                                  
 block_13_pad (ZeroPadding2D)   (None, 15, 15, 576)  0           ['block_13_expand_relu[0][0]']   
                                                                                                  
 block_13_depthwise (DepthwiseC  (None, 7, 7, 576)   5184        ['block_13_pad[0][0]']           
 onv2D)                                                                                           
                                                                                                  
 block_13_depthwise_BN (BatchNo  (None, 7, 7, 576)   2304        ['block_13_depthwise[0][0]']     
 rmalization)                                                                                     
                                                                                                  
 block_13_depthwise_relu (ReLU)  (None, 7, 7, 576)   0           ['block_13_depthwise_BN[0][0]']  
                                                                                                  
 block_13_project (Conv2D)      (None, 7, 7, 160)    92160       ['block_13_depthwise_relu[0][0]']
                                                                                                  
 block_13_project_BN (BatchNorm  (None, 7, 7, 160)   640         ['block_13_project[0][0]']       
 alization)                                                                                       
                                                                                                  
 block_14_expand (Conv2D)       (None, 7, 7, 960)    153600      ['block_13_project_BN[0][0]']    
                                                                                                  
 block_14_expand_BN (BatchNorma  (None, 7, 7, 960)   3840        ['block_14_expand[0][0]']        
 lization)                                                                                        
                                                                                                  
 block_14_expand_relu (ReLU)    (None, 7, 7, 960)    0           ['block_14_expand_BN[0][0]']     
                                                                                                  
 block_14_depthwise (DepthwiseC  (None, 7, 7, 960)   8640        ['block_14_expand_relu[0][0]']   
 onv2D)                                                                                           
                                                                                                  
 block_14_depthwise_BN (BatchNo  (None, 7, 7, 960)   3840        ['block_14_depthwise[0][0]']     
 rmalization)                                                                                     
                                                                                                  
 block_14_depthwise_relu (ReLU)  (None, 7, 7, 960)   0           ['block_14_depthwise_BN[0][0]']  
                                                                                                  
 block_14_project (Conv2D)      (None, 7, 7, 160)    153600      ['block_14_depthwise_relu[0][0]']
                                                                                                  
 block_14_project_BN (BatchNorm  (None, 7, 7, 160)   640         ['block_14_project[0][0]']       
 alization)                                                                                       
                                                                                                  
 block_14_add (Add)             (None, 7, 7, 160)    0           ['block_13_project_BN[0][0]',    
                                                                  'block_14_project_BN[0][0]']    
                                                                                                  
 block_15_expand (Conv2D)       (None, 7, 7, 960)    153600      ['block_14_add[0][0]']           
                                                                                                  
 block_15_expand_BN (BatchNorma  (None, 7, 7, 960)   3840        ['block_15_expand[0][0]']        
 lization)                                                                                        
                                                                                                  
 block_15_expand_relu (ReLU)    (None, 7, 7, 960)    0           ['block_15_expand_BN[0][0]']     
                                                                                                  
 block_15_depthwise (DepthwiseC  (None, 7, 7, 960)   8640        ['block_15_expand_relu[0][0]']   
 onv2D)                                                                                           
                                                                                                  
 block_15_depthwise_BN (BatchNo  (None, 7, 7, 960)   3840        ['block_15_depthwise[0][0]']     
 rmalization)                                                                                     
                                                                                                  
 block_15_depthwise_relu (ReLU)  (None, 7, 7, 960)   0           ['block_15_depthwise_BN[0][0]']  
                                                                                                  
 block_15_project (Conv2D)      (None, 7, 7, 160)    153600      ['block_15_depthwise_relu[0][0]']
                                                                                                  
 block_15_project_BN (BatchNorm  (None, 7, 7, 160)   640         ['block_15_project[0][0]']       
 alization)                                                                                       
                                                                                                  
 block_15_add (Add)             (None, 7, 7, 160)    0           ['block_14_add[0][0]',           
                                                                  'block_15_project_BN[0][0]']    
                                                                                                  
 block_16_expand (Conv2D)       (None, 7, 7, 960)    153600      ['block_15_add[0][0]']           
                                                                                                  
 block_16_expand_BN (BatchNorma  (None, 7, 7, 960)   3840        ['block_16_expand[0][0]']        
 lization)                                                                                        
                                                                                                  
 block_16_expand_relu (ReLU)    (None, 7, 7, 960)    0           ['block_16_expand_BN[0][0]']     
                                                                                                  
 block_16_depthwise (DepthwiseC  (None, 7, 7, 960)   8640        ['block_16_expand_relu[0][0]']   
 onv2D)                                                                                           
                                                                                                  
 block_16_depthwise_BN (BatchNo  (None, 7, 7, 960)   3840        ['block_16_depthwise[0][0]']     
 rmalization)                                                                                     
                                                                                                  
 block_16_depthwise_relu (ReLU)  (None, 7, 7, 960)   0           ['block_16_depthwise_BN[0][0]']  
                                                                                                  
 block_16_project (Conv2D)      (None, 7, 7, 320)    307200      ['block_16_depthwise_relu[0][0]']
                                                                                                  
 block_16_project_BN (BatchNorm  (None, 7, 7, 320)   1280        ['block_16_project[0][0]']       
 alization)                                                                                       
                                                                                                  
 Conv_1 (Conv2D)                (None, 7, 7, 1280)   409600      ['block_16_project_BN[0][0]']    
                                                                                                  
 Conv_1_bn (BatchNormalization)  (None, 7, 7, 1280)  5120        ['Conv_1[0][0]']                 
                                                                                                  
 out_relu (ReLU)                (None, 7, 7, 1280)   0           ['Conv_1_bn[0][0]']              
                                                                                                  
 global_average_pooling2d (Glob  (None, 1280)        0           ['out_relu[0][0]']               
 alAveragePooling2D)                                                                              
                                                                                                  
 dropout (Dropout)              (None, 1280)         0           ['global_average_pooling2d[0][0]'
                                                                 ]                                
                                                                                                  
 dense (Dense)                  (None, 196)          251076      ['dropout[0][0]']                
                                                                                                  
==================================================================================================
Total params: 2,509,060
Trainable params: 2,344,260
Non-trainable params: 164,800
__________________________________________________________________________________________________
In [ ]:
visualkeras_view(mobile_net_model)
Out[ ]:
In [ ]:
# Fit the model
mobile_net_model_history=mobile_net_model.fit(train_generator, validation_data=test_generator, epochs=60, verbose=1,  callbacks=create_callbacks('mobile_net_model'))
Epoch 1/60
2023-06-20 16:13:57.473499: I tensorflow/core/common_runtime/executor.cc:1197] [/device:CPU:0] (DEBUG INFO) Executor start aborting (this does not indicate an error and you can ignore this message): INVALID_ARGUMENT: You must feed a value for placeholder tensor 'Placeholder/_0' with dtype int32
	 [[{{node Placeholder/_0}}]]
128/128 [==============================] - ETA: 0s - loss: 5.3052 - accuracy: 0.0152
2023-06-20 16:20:31.096848: I tensorflow/core/common_runtime/executor.cc:1197] [/device:CPU:0] (DEBUG INFO) Executor start aborting (this does not indicate an error and you can ignore this message): INVALID_ARGUMENT: You must feed a value for placeholder tensor 'Placeholder/_0' with dtype int32
	 [[{{node Placeholder/_0}}]]
Epoch 1: saving model to mobile_net_model_weights.h5
128/128 [==============================] - 627s 5s/step - loss: 5.3052 - accuracy: 0.0152 - val_loss: 5.2289 - val_accuracy: 0.0219 - lr: 1.0000e-04
Epoch 2/60
128/128 [==============================] - ETA: 0s - loss: 4.6731 - accuracy: 0.0840
Epoch 2: saving model to mobile_net_model_weights.h5
128/128 [==============================] - 613s 5s/step - loss: 4.6731 - accuracy: 0.0840 - val_loss: 4.6484 - val_accuracy: 0.0580 - lr: 1.0000e-04
Epoch 3/60
128/128 [==============================] - ETA: 0s - loss: 3.9227 - accuracy: 0.1891
Epoch 3: saving model to mobile_net_model_weights.h5
128/128 [==============================] - 673s 5s/step - loss: 3.9227 - accuracy: 0.1891 - val_loss: 4.1415 - val_accuracy: 0.1134 - lr: 1.0000e-04
Epoch 4/60
128/128 [==============================] - ETA: 0s - loss: 3.2736 - accuracy: 0.3100
Epoch 4: saving model to mobile_net_model_weights.h5
128/128 [==============================] - 673s 5s/step - loss: 3.2736 - accuracy: 0.3100 - val_loss: 3.7558 - val_accuracy: 0.1655 - lr: 1.0000e-04
Epoch 5/60
128/128 [==============================] - ETA: 0s - loss: 2.7614 - accuracy: 0.4063
Epoch 5: saving model to mobile_net_model_weights.h5
128/128 [==============================] - 675s 5s/step - loss: 2.7614 - accuracy: 0.4063 - val_loss: 3.4598 - val_accuracy: 0.2117 - lr: 1.0000e-04
Epoch 6/60
128/128 [==============================] - ETA: 0s - loss: 2.3496 - accuracy: 0.4936
Epoch 6: saving model to mobile_net_model_weights.h5
128/128 [==============================] - 677s 5s/step - loss: 2.3496 - accuracy: 0.4936 - val_loss: 3.1178 - val_accuracy: 0.2852 - lr: 1.0000e-04
Epoch 7/60
128/128 [==============================] - ETA: 0s - loss: 2.0549 - accuracy: 0.5556
Epoch 7: saving model to mobile_net_model_weights.h5
128/128 [==============================] - 670s 5s/step - loss: 2.0549 - accuracy: 0.5556 - val_loss: 2.8982 - val_accuracy: 0.3273 - lr: 1.0000e-04
Epoch 8/60
128/128 [==============================] - ETA: 0s - loss: 1.8029 - accuracy: 0.6100
Epoch 8: saving model to mobile_net_model_weights.h5
128/128 [==============================] - 669s 5s/step - loss: 1.8029 - accuracy: 0.6100 - val_loss: 2.6513 - val_accuracy: 0.3815 - lr: 1.0000e-04
Epoch 9/60
128/128 [==============================] - ETA: 0s - loss: 1.6010 - accuracy: 0.6532
Epoch 9: saving model to mobile_net_model_weights.h5
128/128 [==============================] - 670s 5s/step - loss: 1.6010 - accuracy: 0.6532 - val_loss: 2.5277 - val_accuracy: 0.4028 - lr: 1.0000e-04
Epoch 10/60
128/128 [==============================] - ETA: 0s - loss: 1.4556 - accuracy: 0.6783
Epoch 10: saving model to mobile_net_model_weights.h5
128/128 [==============================] - 669s 5s/step - loss: 1.4556 - accuracy: 0.6783 - val_loss: 2.3253 - val_accuracy: 0.4538 - lr: 1.0000e-04
Epoch 11/60
128/128 [==============================] - ETA: 0s - loss: 1.3184 - accuracy: 0.7107
Epoch 11: saving model to mobile_net_model_weights.h5
128/128 [==============================] - 661s 5s/step - loss: 1.3184 - accuracy: 0.7107 - val_loss: 2.2034 - val_accuracy: 0.4799 - lr: 1.0000e-04
Epoch 12/60
128/128 [==============================] - ETA: 0s - loss: 1.2002 - accuracy: 0.7366
Epoch 12: saving model to mobile_net_model_weights.h5
128/128 [==============================] - 987s 8s/step - loss: 1.2002 - accuracy: 0.7366 - val_loss: 2.0736 - val_accuracy: 0.5043 - lr: 1.0000e-04
Epoch 13/60
128/128 [==============================] - ETA: 0s - loss: 1.1124 - accuracy: 0.7549
Epoch 13: saving model to mobile_net_model_weights.h5
128/128 [==============================] - 981s 8s/step - loss: 1.1124 - accuracy: 0.7549 - val_loss: 1.9263 - val_accuracy: 0.5377 - lr: 1.0000e-04
Epoch 14/60
128/128 [==============================] - ETA: 0s - loss: 1.0187 - accuracy: 0.7753
Epoch 14: saving model to mobile_net_model_weights.h5
128/128 [==============================] - 947s 7s/step - loss: 1.0187 - accuracy: 0.7753 - val_loss: 1.7677 - val_accuracy: 0.5752 - lr: 1.0000e-04
Epoch 15/60
128/128 [==============================] - ETA: 0s - loss: 0.9393 - accuracy: 0.7929
Epoch 15: saving model to mobile_net_model_weights.h5
128/128 [==============================] - 740s 6s/step - loss: 0.9393 - accuracy: 0.7929 - val_loss: 1.6717 - val_accuracy: 0.5978 - lr: 1.0000e-04
Epoch 16/60
128/128 [==============================] - ETA: 0s - loss: 0.8784 - accuracy: 0.8084
Epoch 16: saving model to mobile_net_model_weights.h5
128/128 [==============================] - 705s 6s/step - loss: 0.8784 - accuracy: 0.8084 - val_loss: 1.5816 - val_accuracy: 0.6134 - lr: 1.0000e-04
Epoch 17/60
128/128 [==============================] - ETA: 0s - loss: 0.8128 - accuracy: 0.8261
Epoch 17: saving model to mobile_net_model_weights.h5
128/128 [==============================] - 685s 5s/step - loss: 0.8128 - accuracy: 0.8261 - val_loss: 1.5019 - val_accuracy: 0.6300 - lr: 1.0000e-04
Epoch 18/60
128/128 [==============================] - ETA: 0s - loss: 0.7643 - accuracy: 0.8353
Epoch 18: saving model to mobile_net_model_weights.h5
128/128 [==============================] - 663s 5s/step - loss: 0.7643 - accuracy: 0.8353 - val_loss: 1.4229 - val_accuracy: 0.6481 - lr: 1.0000e-04
Epoch 19/60
128/128 [==============================] - ETA: 0s - loss: 0.7218 - accuracy: 0.8455
Epoch 19: saving model to mobile_net_model_weights.h5
128/128 [==============================] - 574s 4s/step - loss: 0.7218 - accuracy: 0.8455 - val_loss: 1.3534 - val_accuracy: 0.6615 - lr: 1.0000e-04
Epoch 20/60
128/128 [==============================] - ETA: 0s - loss: 0.6745 - accuracy: 0.8536
Epoch 20: saving model to mobile_net_model_weights.h5
128/128 [==============================] - 542s 4s/step - loss: 0.6745 - accuracy: 0.8536 - val_loss: 1.2840 - val_accuracy: 0.6775 - lr: 1.0000e-04
Epoch 21/60
128/128 [==============================] - ETA: 0s - loss: 0.6423 - accuracy: 0.8615
Epoch 21: saving model to mobile_net_model_weights.h5
128/128 [==============================] - 542s 4s/step - loss: 0.6423 - accuracy: 0.8615 - val_loss: 1.2533 - val_accuracy: 0.6839 - lr: 1.0000e-04
Epoch 22/60
128/128 [==============================] - ETA: 0s - loss: 0.6122 - accuracy: 0.8712
Epoch 22: saving model to mobile_net_model_weights.h5
128/128 [==============================] - 537s 4s/step - loss: 0.6122 - accuracy: 0.8712 - val_loss: 1.2185 - val_accuracy: 0.6934 - lr: 1.0000e-04
Epoch 23/60
128/128 [==============================] - ETA: 0s - loss: 0.5833 - accuracy: 0.8759
Epoch 23: saving model to mobile_net_model_weights.h5
128/128 [==============================] - 563s 4s/step - loss: 0.5833 - accuracy: 0.8759 - val_loss: 1.1634 - val_accuracy: 0.7046 - lr: 1.0000e-04
Epoch 24/60
128/128 [==============================] - ETA: 0s - loss: 0.5448 - accuracy: 0.8908
Epoch 24: saving model to mobile_net_model_weights.h5
128/128 [==============================] - 604s 5s/step - loss: 0.5448 - accuracy: 0.8908 - val_loss: 1.1408 - val_accuracy: 0.7079 - lr: 1.0000e-04
Epoch 25/60
128/128 [==============================] - ETA: 0s - loss: 0.5256 - accuracy: 0.8901
Epoch 25: saving model to mobile_net_model_weights.h5
128/128 [==============================] - 625s 5s/step - loss: 0.5256 - accuracy: 0.8901 - val_loss: 1.1110 - val_accuracy: 0.7172 - lr: 1.0000e-04
Epoch 26/60
128/128 [==============================] - ETA: 0s - loss: 0.4865 - accuracy: 0.9028
Epoch 26: saving model to mobile_net_model_weights.h5
128/128 [==============================] - 633s 5s/step - loss: 0.4865 - accuracy: 0.9028 - val_loss: 1.0609 - val_accuracy: 0.7235 - lr: 1.0000e-04
Epoch 27/60
128/128 [==============================] - ETA: 0s - loss: 0.4749 - accuracy: 0.8998
Epoch 27: saving model to mobile_net_model_weights.h5
128/128 [==============================] - 642s 5s/step - loss: 0.4749 - accuracy: 0.8998 - val_loss: 1.0578 - val_accuracy: 0.7285 - lr: 1.0000e-04
Epoch 28/60
128/128 [==============================] - ETA: 0s - loss: 0.4616 - accuracy: 0.9053
Epoch 28: saving model to mobile_net_model_weights.h5
128/128 [==============================] - 617s 5s/step - loss: 0.4616 - accuracy: 0.9053 - val_loss: 1.0141 - val_accuracy: 0.7381 - lr: 1.0000e-04
Epoch 29/60
128/128 [==============================] - ETA: 0s - loss: 0.4370 - accuracy: 0.9100
Epoch 29: saving model to mobile_net_model_weights.h5
128/128 [==============================] - 635s 5s/step - loss: 0.4370 - accuracy: 0.9100 - val_loss: 1.0011 - val_accuracy: 0.7417 - lr: 1.0000e-04
Epoch 30/60
128/128 [==============================] - ETA: 0s - loss: 0.4195 - accuracy: 0.9153
Epoch 30: saving model to mobile_net_model_weights.h5
128/128 [==============================] - 618s 5s/step - loss: 0.4195 - accuracy: 0.9153 - val_loss: 0.9892 - val_accuracy: 0.7401 - lr: 1.0000e-04
Epoch 31/60
128/128 [==============================] - ETA: 0s - loss: 0.4107 - accuracy: 0.9174
Epoch 31: saving model to mobile_net_model_weights.h5
128/128 [==============================] - 620s 5s/step - loss: 0.4107 - accuracy: 0.9174 - val_loss: 0.9684 - val_accuracy: 0.7454 - lr: 1.0000e-04
Epoch 32/60
128/128 [==============================] - ETA: 0s - loss: 0.3950 - accuracy: 0.9182
Epoch 32: saving model to mobile_net_model_weights.h5
128/128 [==============================] - 649s 5s/step - loss: 0.3950 - accuracy: 0.9182 - val_loss: 0.9523 - val_accuracy: 0.7469 - lr: 1.0000e-04
Epoch 33/60
128/128 [==============================] - ETA: 0s - loss: 0.3701 - accuracy: 0.9284
Epoch 33: saving model to mobile_net_model_weights.h5
128/128 [==============================] - 637s 5s/step - loss: 0.3701 - accuracy: 0.9284 - val_loss: 0.9385 - val_accuracy: 0.7528 - lr: 1.0000e-04
Epoch 34/60
128/128 [==============================] - ETA: 0s - loss: 0.3672 - accuracy: 0.9268
Epoch 34: saving model to mobile_net_model_weights.h5
128/128 [==============================] - 671s 5s/step - loss: 0.3672 - accuracy: 0.9268 - val_loss: 0.9239 - val_accuracy: 0.7538 - lr: 1.0000e-04
Epoch 35/60
128/128 [==============================] - ETA: 0s - loss: 0.3509 - accuracy: 0.9343
Epoch 35: saving model to mobile_net_model_weights.h5
128/128 [==============================] - 642s 5s/step - loss: 0.3509 - accuracy: 0.9343 - val_loss: 0.9074 - val_accuracy: 0.7564 - lr: 1.0000e-04
Epoch 36/60
128/128 [==============================] - ETA: 0s - loss: 0.3339 - accuracy: 0.9384
Epoch 36: saving model to mobile_net_model_weights.h5
128/128 [==============================] - 639s 5s/step - loss: 0.3339 - accuracy: 0.9384 - val_loss: 0.9038 - val_accuracy: 0.7613 - lr: 1.0000e-04
Epoch 37/60
128/128 [==============================] - ETA: 0s - loss: 0.3265 - accuracy: 0.9366
Epoch 37: saving model to mobile_net_model_weights.h5
128/128 [==============================] - 642s 5s/step - loss: 0.3265 - accuracy: 0.9366 - val_loss: 0.8805 - val_accuracy: 0.7643 - lr: 1.0000e-04
Epoch 38/60
128/128 [==============================] - ETA: 0s - loss: 0.3125 - accuracy: 0.9400
Epoch 38: saving model to mobile_net_model_weights.h5
128/128 [==============================] - 646s 5s/step - loss: 0.3125 - accuracy: 0.9400 - val_loss: 0.8782 - val_accuracy: 0.7658 - lr: 1.0000e-04
Epoch 39/60
128/128 [==============================] - ETA: 0s - loss: 0.3040 - accuracy: 0.9430
Epoch 39: saving model to mobile_net_model_weights.h5
128/128 [==============================] - 648s 5s/step - loss: 0.3040 - accuracy: 0.9430 - val_loss: 0.8668 - val_accuracy: 0.7686 - lr: 1.0000e-04
Epoch 40/60
128/128 [==============================] - ETA: 0s - loss: 0.2987 - accuracy: 0.9411
Epoch 40: saving model to mobile_net_model_weights.h5
128/128 [==============================] - 653s 5s/step - loss: 0.2987 - accuracy: 0.9411 - val_loss: 0.8625 - val_accuracy: 0.7748 - lr: 1.0000e-04
Epoch 41/60
128/128 [==============================] - ETA: 0s - loss: 0.2927 - accuracy: 0.9447
Epoch 41: saving model to mobile_net_model_weights.h5
128/128 [==============================] - 654s 5s/step - loss: 0.2927 - accuracy: 0.9447 - val_loss: 0.8617 - val_accuracy: 0.7712 - lr: 1.0000e-04
Epoch 42/60
128/128 [==============================] - ETA: 0s - loss: 0.2764 - accuracy: 0.9477
Epoch 42: saving model to mobile_net_model_weights.h5
128/128 [==============================] - 664s 5s/step - loss: 0.2764 - accuracy: 0.9477 - val_loss: 0.8337 - val_accuracy: 0.7780 - lr: 1.0000e-04
Epoch 43/60
128/128 [==============================] - ETA: 0s - loss: 0.2778 - accuracy: 0.9489
Epoch 43: saving model to mobile_net_model_weights.h5
128/128 [==============================] - 660s 5s/step - loss: 0.2778 - accuracy: 0.9489 - val_loss: 0.8323 - val_accuracy: 0.7753 - lr: 1.0000e-04
Epoch 44/60
128/128 [==============================] - ETA: 0s - loss: 0.2662 - accuracy: 0.9499
Epoch 44: saving model to mobile_net_model_weights.h5
128/128 [==============================] - 661s 5s/step - loss: 0.2662 - accuracy: 0.9499 - val_loss: 0.8276 - val_accuracy: 0.7765 - lr: 1.0000e-04
Epoch 45/60
128/128 [==============================] - ETA: 0s - loss: 0.2599 - accuracy: 0.9514
Epoch 45: saving model to mobile_net_model_weights.h5
128/128 [==============================] - 659s 5s/step - loss: 0.2599 - accuracy: 0.9514 - val_loss: 0.8196 - val_accuracy: 0.7793 - lr: 1.0000e-04
Epoch 46/60
128/128 [==============================] - ETA: 0s - loss: 0.2483 - accuracy: 0.9567
Epoch 46: saving model to mobile_net_model_weights.h5
128/128 [==============================] - 662s 5s/step - loss: 0.2483 - accuracy: 0.9567 - val_loss: 0.8150 - val_accuracy: 0.7766 - lr: 1.0000e-04
Epoch 47/60
128/128 [==============================] - ETA: 0s - loss: 0.2437 - accuracy: 0.9563
Epoch 47: saving model to mobile_net_model_weights.h5
128/128 [==============================] - 668s 5s/step - loss: 0.2437 - accuracy: 0.9563 - val_loss: 0.8237 - val_accuracy: 0.7718 - lr: 1.0000e-04
Epoch 48/60
128/128 [==============================] - ETA: 0s - loss: 0.2416 - accuracy: 0.9568
Epoch 48: saving model to mobile_net_model_weights.h5
128/128 [==============================] - 673s 5s/step - loss: 0.2416 - accuracy: 0.9568 - val_loss: 0.8122 - val_accuracy: 0.7817 - lr: 1.0000e-04
Epoch 49/60
128/128 [==============================] - ETA: 0s - loss: 0.2339 - accuracy: 0.9560
Epoch 49: saving model to mobile_net_model_weights.h5
128/128 [==============================] - 666s 5s/step - loss: 0.2339 - accuracy: 0.9560 - val_loss: 0.8116 - val_accuracy: 0.7783 - lr: 1.0000e-04
Epoch 50/60
128/128 [==============================] - ETA: 0s - loss: 0.2305 - accuracy: 0.9573
Epoch 50: saving model to mobile_net_model_weights.h5
128/128 [==============================] - 670s 5s/step - loss: 0.2305 - accuracy: 0.9573 - val_loss: 0.7923 - val_accuracy: 0.7881 - lr: 1.0000e-04
Epoch 51/60
128/128 [==============================] - ETA: 0s - loss: 0.2259 - accuracy: 0.9589
Epoch 51: saving model to mobile_net_model_weights.h5
128/128 [==============================] - 669s 5s/step - loss: 0.2259 - accuracy: 0.9589 - val_loss: 0.7961 - val_accuracy: 0.7849 - lr: 1.0000e-04
Epoch 52/60
128/128 [==============================] - ETA: 0s - loss: 0.2160 - accuracy: 0.9612
Epoch 52: saving model to mobile_net_model_weights.h5
128/128 [==============================] - 666s 5s/step - loss: 0.2160 - accuracy: 0.9612 - val_loss: 0.7950 - val_accuracy: 0.7785 - lr: 1.0000e-04
Epoch 53/60
128/128 [==============================] - ETA: 0s - loss: 0.2187 - accuracy: 0.9605
Epoch 53: saving model to mobile_net_model_weights.h5
128/128 [==============================] - 663s 5s/step - loss: 0.2187 - accuracy: 0.9605 - val_loss: 0.7820 - val_accuracy: 0.7933 - lr: 1.0000e-04
Epoch 54/60
128/128 [==============================] - ETA: 0s - loss: 0.2141 - accuracy: 0.9617
Epoch 54: saving model to mobile_net_model_weights.h5
128/128 [==============================] - 663s 5s/step - loss: 0.2141 - accuracy: 0.9617 - val_loss: 0.7679 - val_accuracy: 0.7842 - lr: 1.0000e-04
Epoch 55/60
128/128 [==============================] - ETA: 0s - loss: 0.2019 - accuracy: 0.9660
Epoch 55: saving model to mobile_net_model_weights.h5
128/128 [==============================] - 668s 5s/step - loss: 0.2019 - accuracy: 0.9660 - val_loss: 0.7771 - val_accuracy: 0.7906 - lr: 1.0000e-04
Epoch 56/60
128/128 [==============================] - ETA: 0s - loss: 0.1997 - accuracy: 0.9666
Epoch 56: saving model to mobile_net_model_weights.h5
128/128 [==============================] - 663s 5s/step - loss: 0.1997 - accuracy: 0.9666 - val_loss: 0.7735 - val_accuracy: 0.7893 - lr: 1.0000e-04
Epoch 57/60
128/128 [==============================] - ETA: 0s - loss: 0.1936 - accuracy: 0.9656
Epoch 57: saving model to mobile_net_model_weights.h5
128/128 [==============================] - 665s 5s/step - loss: 0.1936 - accuracy: 0.9656 - val_loss: 0.7561 - val_accuracy: 0.7953 - lr: 1.0000e-04
Epoch 58/60
128/128 [==============================] - ETA: 0s - loss: 0.1889 - accuracy: 0.9693
Epoch 58: saving model to mobile_net_model_weights.h5
128/128 [==============================] - 662s 5s/step - loss: 0.1889 - accuracy: 0.9693 - val_loss: 0.7544 - val_accuracy: 0.7957 - lr: 1.0000e-04
Epoch 59/60
128/128 [==============================] - ETA: 0s - loss: 0.1846 - accuracy: 0.9662
Epoch 59: saving model to mobile_net_model_weights.h5
128/128 [==============================] - 664s 5s/step - loss: 0.1846 - accuracy: 0.9662 - val_loss: 0.7549 - val_accuracy: 0.7890 - lr: 1.0000e-04
Epoch 60/60
128/128 [==============================] - ETA: 0s - loss: 0.1843 - accuracy: 0.9691
Epoch 60: saving model to mobile_net_model_weights.h5
128/128 [==============================] - 660s 5s/step - loss: 0.1843 - accuracy: 0.9691 - val_loss: 0.7614 - val_accuracy: 0.7911 - lr: 1.0000e-04
In [ ]:
# create an iterator object with write permission - base_model_2.pkl
with open('mobile_net_model.pkl', 'wb') as files:
    pickle.dump(mobile_net_model, files)
mobile_net_model.save('mobile_net_model.h5')
In [ ]:
metric_score(mobile_net_model_history,mobile_net_model,200,'mobile_net_model',model_list, is_image_augmented=True, train_set=train_generator, test_set=test_generator)
2023-06-21 03:18:05.928186: I tensorflow/core/common_runtime/executor.cc:1197] [/device:CPU:0] (DEBUG INFO) Executor start aborting (this does not indicate an error and you can ignore this message): INVALID_ARGUMENT: You must feed a value for placeholder tensor 'Placeholder/_0' with dtype int32
	 [[{{node Placeholder/_0}}]]
128/128 [==============================] - 263s 2s/step - loss: 0.1280 - accuracy: 0.9815
2023-06-21 03:22:29.831084: I tensorflow/core/common_runtime/executor.cc:1197] [/device:CPU:0] (DEBUG INFO) Executor start aborting (this does not indicate an error and you can ignore this message): INVALID_ARGUMENT: You must feed a value for placeholder tensor 'Placeholder/_0' with dtype int32
	 [[{{node Placeholder/_0}}]]
126/126 [==============================] - 259s 2s/step - loss: 0.7583 - accuracy: 0.7929
2023-06-21 03:26:50.155536: I tensorflow/core/common_runtime/executor.cc:1197] [/device:CPU:0] (DEBUG INFO) Executor start aborting (this does not indicate an error and you can ignore this message): INVALID_ARGUMENT: You must feed a value for placeholder tensor 'Placeholder/_0' with dtype int32
	 [[{{node Placeholder/_0}}]]
126/126 [==============================] - 260s 2s/step

Classification Matrix:
               precision    recall  f1-score   support

           0       0.00      0.00      0.00        44
           1       0.00      0.00      0.00        44
           2       0.00      0.00      0.00        32
           3       0.00      0.00      0.00        43
           4       0.02      0.02      0.02        42
           5       0.00      0.00      0.00        40
           6       0.00      0.00      0.00        39
           7       0.02      0.02      0.02        45
           8       0.00      0.00      0.00        41
           9       0.00      0.00      0.00        33
          10       0.00      0.00      0.00        38
          11       0.00      0.00      0.00        40
          12       0.00      0.00      0.00        42
          13       0.00      0.00      0.00        41
          14       0.00      0.00      0.00        43
          15       0.00      0.00      0.00        36
          16       0.00      0.00      0.00        45
          17       0.00      0.00      0.00        39
          18       0.00      0.00      0.00        42
          19       0.00      0.00      0.00        42
          20       0.00      0.00      0.00        46
          21       0.00      0.00      0.00        40
          22       0.00      0.00      0.00        39
          23       0.00      0.00      0.00        42
          24       0.00      0.00      0.00        43
          25       0.00      0.00      0.00        35
          26       0.00      0.00      0.00        41
          27       0.00      0.00      0.00        42
          28       0.00      0.00      0.00        41
          29       0.00      0.00      0.00        44
          30       0.00      0.00      0.00        34
          31       0.00      0.00      0.00        44
          32       0.00      0.00      0.00        41
          33       0.00      0.00      0.00        41
          34       0.00      0.00      0.00        38
          35       0.02      0.02      0.02        41
          36       0.04      0.05      0.04        42
          37       0.00      0.00      0.00        40
          38       0.00      0.00      0.00        39
          39       0.00      0.00      0.00        44
          40       0.00      0.00      0.00        46
          41       0.00      0.00      0.00        34
          42       0.00      0.00      0.00        36
          43       0.00      0.00      0.00        35
          44       0.00      0.00      0.00        32
          45       0.02      0.02      0.02        43
          46       0.00      0.00      0.00        42
          47       0.00      0.00      0.00        42
          48       0.00      0.00      0.00        35
          49       0.03      0.03      0.03        37
          50       0.00      0.00      0.00        43
          51       0.00      0.00      0.00        44
          52       0.00      0.00      0.00        41
          53       0.00      0.00      0.00        45
          54       0.00      0.00      0.00        44
          55       0.00      0.00      0.00        41
          56       0.00      0.00      0.00        39
          57       0.00      0.00      0.00        37
          58       0.04      0.04      0.04        46
          59       0.00      0.00      0.00        29
          60       0.00      0.00      0.00        35
          61       0.00      0.00      0.00        36
          62       0.00      0.00      0.00        43
          63       0.00      0.00      0.00        38
          64       0.00      0.00      0.00        44
          65       0.00      0.00      0.00        45
          66       0.00      0.00      0.00        42
          67       0.00      0.00      0.00        43
          68       0.00      0.00      0.00        40
          69       0.00      0.00      0.00        44
          70       0.00      0.00      0.00        38
          71       0.00      0.00      0.00        44
          72       0.00      0.00      0.00        37
          73       0.00      0.00      0.00        40
          74       0.00      0.00      0.00        44
          75       0.04      0.04      0.04        48
          76       0.02      0.02      0.02        43
          77       0.00      0.00      0.00        43
          78       0.00      0.00      0.00        45
          79       0.00      0.00      0.00        40
          80       0.00      0.00      0.00        37
          81       0.00      0.00      0.00        45
          82       0.00      0.00      0.00        42
          83       0.02      0.03      0.02        40
          84       0.00      0.00      0.00        43
          85       0.00      0.00      0.00        39
          86       0.00      0.00      0.00        42
          87       0.00      0.00      0.00        41
          88       0.00      0.00      0.00        38
          89       0.00      0.00      0.00        41
          90       0.00      0.00      0.00        45
          91       0.00      0.00      0.00        43
          92       0.02      0.02      0.02        44
          93       0.03      0.03      0.03        40
          94       0.02      0.02      0.02        42
          95       0.00      0.00      0.00        44
          96       0.03      0.03      0.03        39
          97       0.00      0.00      0.00        46
          98       0.00      0.00      0.00        27
          99       0.03      0.03      0.03        33
         100       0.00      0.00      0.00        39
         101       0.00      0.00      0.00        42
         102       0.00      0.00      0.00        39
         103       0.00      0.00      0.00        42
         104       0.00      0.00      0.00        43
         105       0.00      0.00      0.00        37
         106       0.04      0.05      0.04        43
         107       0.03      0.02      0.02        44
         108       0.00      0.00      0.00        45
         109       0.00      0.00      0.00        42
         110       0.00      0.00      0.00        41
         111       0.03      0.02      0.02        42
         112       0.00      0.00      0.00        45
         113       0.00      0.00      0.00        44
         114       0.00      0.00      0.00        45
         115       0.00      0.00      0.00        44
         116       0.02      0.02      0.02        42
         117       0.00      0.00      0.00        44
         118       0.00      0.00      0.00        40
         119       0.01      0.01      0.01        68
         120       0.00      0.00      0.00        41
         121       0.00      0.00      0.00        42
         122       0.00      0.00      0.00        44
         123       0.03      0.02      0.03        43
         124       0.02      0.03      0.02        39
         125       0.00      0.00      0.00        39
         126       0.02      0.03      0.03        38
         127       0.00      0.00      0.00        41
         128       0.00      0.00      0.00        42
         129       0.00      0.00      0.00        24
         130       0.00      0.00      0.00        42
         131       0.00      0.00      0.00        42
         132       0.02      0.02      0.02        42
         133       0.02      0.02      0.02        43
         134       0.00      0.00      0.00        42
         135       0.00      0.00      0.00        33
         136       0.00      0.00      0.00        39
         137       0.00      0.00      0.00        43
         138       0.00      0.00      0.00        41
         139       0.00      0.00      0.00        42
         140       0.00      0.00      0.00        34
         141       0.00      0.00      0.00        32
         142       0.00      0.00      0.00        40
         143       0.00      0.00      0.00        46
         144       0.00      0.00      0.00        42
         145       0.00      0.00      0.00        45
         146       0.00      0.00      0.00        44
         147       0.00      0.00      0.00        44
         148       0.00      0.00      0.00        43
         149       0.00      0.00      0.00        43
         150       0.02      0.02      0.02        44
         151       0.00      0.00      0.00        35
         152       0.03      0.03      0.03        36
         153       0.00      0.00      0.00        42
         154       0.00      0.00      0.00        42
         155       0.00      0.00      0.00        39
         156       0.00      0.00      0.00        36
         157       0.03      0.03      0.03        29
         158       0.03      0.03      0.03        36
         159       0.02      0.02      0.02        44
         160       0.00      0.00      0.00        48
         161       0.00      0.00      0.00        45
         162       0.00      0.00      0.00        43
         163       0.02      0.02      0.02        44
         164       0.00      0.00      0.00        36
         165       0.02      0.02      0.02        41
         166       0.02      0.02      0.02        47
         167       0.02      0.02      0.02        46
         168       0.00      0.00      0.00        44
         169       0.00      0.00      0.00        42
         170       0.00      0.00      0.00        38
         171       0.00      0.00      0.00        44
         172       0.00      0.00      0.00        43
         173       0.03      0.02      0.03        41
         174       0.00      0.00      0.00        38
         175       0.00      0.00      0.00        30
         176       0.00      0.00      0.00        44
         177       0.00      0.00      0.00        41
         178       0.02      0.02      0.02        45
         179       0.00      0.00      0.00        42
         180       0.00      0.00      0.00        38
         181       0.00      0.00      0.00        46
         182       0.00      0.00      0.00        42
         183       0.03      0.03      0.03        40
         184       0.00      0.00      0.00        38
         185       0.00      0.00      0.00        40
         186       0.04      0.05      0.04        43
         187       0.03      0.02      0.03        43
         188       0.03      0.03      0.03        38
         189       0.00      0.00      0.00        42
         190       0.00      0.00      0.00        46
         191       0.00      0.00      0.00        43
         192       0.02      0.02      0.02        45
         193       0.00      0.00      0.00        41
         194       0.00      0.00      0.00        43
         195       0.00      0.00      0.00        40

    accuracy                           0.01      8041
   macro avg       0.01      0.01      0.01      8041
weighted avg       0.01      0.01      0.01      8041

Step 2: Design, train and test RCNN & its hybrids based object detection models to impose the bounding box or mask over the area of interest.¶

Using pre-trained models of coco faster_rcnn_nas_coco_2018_01_28 and examining car dataset, how it shows bbox.¶

In [ ]:
# Model Loader
def load_model(model_name):
  base_url = 'http://download.tensorflow.org/models/object_detection/'
  model_file = model_name + '.tar.gz'
  model_dir = tf.keras.utils.get_file(
    fname=model_name,
    origin=base_url + model_file,
    untar=True)

  model_dir = pathlib.Path(model_dir)/"saved_model"

  model = tf.saved_model.load(str(model_dir))
  model = model.signatures['serving_default']

  return model
In [ ]:
model_name = 'faster_rcnn_nas_coco_2018_01_28'
detection_model = load_model(model_name)
INFO:tensorflow:Saver not created because there are no variables in the graph to restore
2023-06-23 02:29:46.789343: I tensorflow/core/common_runtime/executor.cc:1197] [/device:CPU:0] (DEBUG INFO) Executor start aborting (this does not indicate an error and you can ignore this message): INVALID_ARGUMENT: You must feed a value for placeholder tensor 'unused_control_flow_input_8' with dtype int32
	 [[{{node unused_control_flow_input_8}}]]
In [ ]:
def run_inference_for_single_image(model, image):
  image = np.asarray(image)
  # The input needs to be a tensor, convert it using `tf.convert_to_tensor`.
  input_tensor = tf.convert_to_tensor(image)
  # The model expects a batch of images, so add an axis with `tf.newaxis`.
  input_tensor = input_tensor[tf.newaxis,...]

  # Run inference
  output_dict = model(input_tensor)

  # All outputs are batches tensors.
  # Convert to numpy arrays, and take index [0] to remove the batch dimension.
  # We're only interested in the first num_detections.
  num_detections = int(output_dict.pop('num_detections'))
  output_dict = {key:value[0, :num_detections].numpy()
                 for key,value in output_dict.items()}
  output_dict['num_detections'] = num_detections

  # detection_classes should be ints.
  output_dict['detection_classes'] = output_dict['detection_classes'].astype(np.int64)

  # Handle models with masks:
  if 'detection_masks' in output_dict:
    # Reframe the the bbox mask to the image size.
    detection_masks_reframed = utils_ops.reframe_box_masks_to_image_masks(
              output_dict['detection_masks'], output_dict['detection_boxes'],
               image.shape[0], image.shape[1])
    detection_masks_reframed = tf.cast(detection_masks_reframed > 0.5,
                                       tf.uint8)
    output_dict['detection_masks_reframed'] = detection_masks_reframed.numpy()

  return output_dict
In [ ]:
def show_inference(model, image_path):
  # the array based representation of the image will be used later in order to prepare the
  # result image with boxes and labels on it.
  image_np = cv2.resize(cv2.imread(image_path), dsize=(224, 224),interpolation=cv2.INTER_AREA)
  # Actual detection.
  output_dict = run_inference_for_single_image(model, image_np)
  # Visualization of the results of a detection.
  vis_util.visualize_boxes_and_labels_on_image_array(
      image_np,
      output_dict['detection_boxes'],
      output_dict['detection_classes'],
      output_dict['detection_scores'],
      category_index,
      instance_masks=output_dict.get('detection_masks_reframed', None),
      use_normalized_coordinates=True,
      line_thickness=8)

  display(Image.fromarray(image_np))
In [ ]:
for idx, row in testing_car_images_df.sample(random_state=random_state,n=3).iterrows():
    show_inference(detection_model, row['Image_Path'])
In [ ]:
for idx, row in training_car_images_df.sample(random_state=random_state,n=3).iterrows():
    show_inference(detection_model, row['Image_Path'])

Object detection models to impose the bounding box¶

In [ ]:
#For Training data set
file_names=[]
class_ids=[]
bboxes=[]
labels=[]

for i in range(len(annot_train_df)):
    annotations=annot_train_df.iloc[i]

    bboxs_x1=annotations[1]
    bboxs_x2=annotations[2]
    bboxs_x3=annotations[3]
    bboxs_x4=annotations[4]
    bboxes.append((bboxs_x1,bboxs_x2,bboxs_x3,bboxs_x4))

    class_id=annotations[5]
    labels.append('%04d' %(class_id,))

    file_names.append(annotations[0])

    class_ids.append(annotations[5])

## Comparing between file names, coordinates and their labels
print(file_names[:5])
print(bboxes[:5])
print(class_ids[:5])
print(labels[:5])
['00001.jpg', '00002.jpg', '00003.jpg', '00004.jpg', '00005.jpg']
[(39, 116, 569, 375), (36, 116, 868, 587), (85, 109, 601, 381), (621, 393, 1484, 1096), (14, 36, 133, 99)]
[14, 3, 91, 134, 106]
['0014', '0003', '0091', '0134', '0106']
In [ ]:
# function to convert BoundingBoxesOnImage object into DataFrame
def bbs_obj_to_df(bbs_object):
    # convert BoundingBoxesOnImage object into array
    bbs_array = bbs_object.to_xyxy_array()
    # convert array into a DataFrame ['xmin', 'ymin', 'xmax', 'ymax'] columns
    df_bbs = pd.DataFrame(bbs_array, columns=['xmin', 'ymin', 'xmax', 'ymax'])
    return df_bbs
In [ ]:
def display_img_bb(g,df):
    x1=df['xmin'][g]
    y1=df['ymin'][g]
    x2=df['xmax'][g]
    y2=df['ymax'][g]

    print('Class Name ->', df['class'][g])

    unscaled=cv2.imread(df['filename'][g])[...,::-1]
    fig, ax=plt.subplots(1)
    ax.imshow(unscaled)
    rect = patches.Rectangle((x1, y1), x2 - x1, y2 - y1, linewidth=2, edgecolor='orange', facecolor='none')
    ax.add_patch(rect)
    plt.grid(None)
    plt.axis('off')
    plt.show()
In [ ]:
# Remove quotes in the front to generate augmentation images and corresponding bounding boxes dataframe

# This setup of augmentation parameters will pick two of four given augmenters and apply them in random order
augmentor = iaa.SomeOf(2, [
    iaa.Affine(scale=(0.5, 1.5)),
    iaa.Affine(rotate=(-60, 60)),
    iaa.Affine(translate_percent={"x": (-0.3, 0.3), "y": (-0.3, 0.3)}),
    iaa.Fliplr(1),
    iaa.Multiply((0.5, 1.5)),
    iaa.GaussianBlur(sigma=(1.0, 3.0)),
    iaa.AdditiveGaussianNoise(scale=(0.03*255, 0.05*255))
])

aug_images_path='Car Images/Train Images Aug/'
image_prefix='train_aug_'
aug_bbs_xy = pd.DataFrame(columns=['filename', 'xmin', 'ymin', 'xmax', 'ymax','class'])

train_folder=glob.glob('Car Images/Train Images/*/*')
for j in range(len(train_folder)):
    index=file_names.index(train_folder[j].split('/')[-1])

    (x1,y1,x2,y2)=bboxes[index]
    tmp_array=[x1,y1,x2,y2]
    df = pd.DataFrame([tmp_array])
    df.columns =['col1','col2','col3','col4']
    bb_array=df.values
    filename=train_folder[j].split('/')[-1]
    classname=train_folder[j].split('/')[-2]
    file_path=os.path.join('Car Images/Train Images/'+classname+'/'+filename)
    image = imageio.imread(file_path)
    bbs = BoundingBoxesOnImage.from_xyxy_array(bb_array, shape=image.shape)
    image_aug, bbs_aug = augmentor(image=image, bounding_boxes=bbs)
    bbs_aug = bbs_aug.remove_out_of_image()
    bbs_aug = bbs_aug.clip_out_of_image()

    if re.findall('Image...', str(bbs_aug)) == ['Image([]']:
        pass
    else:
        imageio.imwrite(aug_images_path+image_prefix+filename, image_aug)
        bbs_df = bbs_obj_to_df(bbs_aug)
        im_df = pd.DataFrame(columns=['filename', 'xmin', 'ymin', 'xmax', 'ymax','class'])
        im_df = im_df.append({'filename' : aug_images_path+image_prefix+filename, 'xmin' : bbs_df.iloc[0]['xmin'], 'ymin' : bbs_df.iloc[0]['ymin'],'xmax' : bbs_df.iloc[0]['xmax'],'ymax' : bbs_df.iloc[0]['ymax'], 'class' : classname}  ,ignore_index = True)
        aug_bbs_xy = pd.concat([aug_bbs_xy, im_df])

aug_bbs_xy = aug_bbs_xy.reset_index()
aug_bbs_xy = aug_bbs_xy.drop(['index'], axis=1)

aug_bbs_xy.to_csv('aug_bbs_xy.csv', index=False )
aug_bbs_xy.head(5)
Out[ ]:
filename xmin ymin xmax ymax class
0 Car Images/Train Images Aug/train_aug_06796.jpg 34.233593 102.707161 338.233582 307.707153 Dodge Dakota Crew Cab 2010
1 Car Images/Train Images Aug/train_aug_04157.jpg 1.069397 16.964672 250.856064 174.215591 Dodge Dakota Crew Cab 2010
2 Car Images/Train Images Aug/train_aug_00682.jpg 11.000000 15.000000 297.000000 201.000000 Dodge Dakota Crew Cab 2010
3 Car Images/Train Images Aug/train_aug_07660.jpg 245.173126 0.000000 1024.000000 534.452820 Dodge Dakota Crew Cab 2010
4 Car Images/Train Images Aug/train_aug_00483.jpg 0.000000 0.000000 588.537964 304.321442 Dodge Dakota Crew Cab 2010
In [ ]:
# Displaying images and bounding boxes
display_img_bb(4,aug_bbs_xy)
display_img_bb(11,aug_bbs_xy)
Class Name -> Dodge Dakota Crew Cab 2010
Class Name -> Dodge Dakota Crew Cab 2010
In [ ]:
train_images_df=annot_train_car_images_df.copy(deep=True)
train_images_df.drop(columns=['Image_Name','Image_Class'],inplace=True)
train_images_df.columns=['filename','class','xmin','ymin','xmax','ymax']
train_images_df.head()
Out[ ]:
filename class xmin ymin xmax ymax
0 Car Images/Train Images/Dodge Dakota Crew Cab ... Dodge Dakota Crew Cab 2010 57 88 361 293
1 Car Images/Train Images/Dodge Dakota Crew Cab ... Dodge Dakota Crew Cab 2010 10 20 256 171
2 Car Images/Train Images/Dodge Dakota Crew Cab ... Dodge Dakota Crew Cab 2010 11 15 297 201
3 Car Images/Train Images/Dodge Dakota Crew Cab ... Dodge Dakota Crew Cab 2010 68 122 980 690
4 Car Images/Train Images/Dodge Dakota Crew Cab ... Dodge Dakota Crew Cab 2010 18 26 623 342
In [ ]:
aug_bbs_xy.shape,train_images_df.shape
Out[ ]:
((8144, 6), (8144, 6))
In [ ]:
## merging two datafames into one common dataframe where we can get all the details about both org images and arg images
final_train_df=  pd.DataFrame(columns=['filename', 'xmin', 'ymin', 'xmax', 'ymax','class'])
final_train_df = pd.concat([train_images_df, aug_bbs_xy], ignore_index=True)
final_train_df.reset_index(inplace=True)
final_train_df.drop(['index'], axis=1, inplace=True)

final_train_df.to_csv('final_train_df.csv', index=False )
final_train_df.head(5)
Out[ ]:
filename class xmin ymin xmax ymax
0 Car Images/Train Images/Dodge Dakota Crew Cab ... Dodge Dakota Crew Cab 2010 57.0 88.0 361.0 293.0
1 Car Images/Train Images/Dodge Dakota Crew Cab ... Dodge Dakota Crew Cab 2010 10.0 20.0 256.0 171.0
2 Car Images/Train Images/Dodge Dakota Crew Cab ... Dodge Dakota Crew Cab 2010 11.0 15.0 297.0 201.0
3 Car Images/Train Images/Dodge Dakota Crew Cab ... Dodge Dakota Crew Cab 2010 68.0 122.0 980.0 690.0
4 Car Images/Train Images/Dodge Dakota Crew Cab ... Dodge Dakota Crew Cab 2010 18.0 26.0 623.0 342.0
In [ ]:
# After merging original and augmented dataset
final_train_df.shape
Out[ ]:
(16288, 6)
In [ ]:
# Displaying images and bounding boxes
display_img_bb(1114,final_train_df)
display_img_bb(11111,final_train_df)
Class Name -> Aston Martin V8 Vantage Convertible 2012
Class Name -> Chrysler Town and Country Minivan 2012
In [ ]:
# Scaling final dataset
trainImagePaths=[]
trainBboxes =[]
trainLabels=[]

trainData= np.zeros((len(final_train_df), 224, 224,3), dtype=np.float32)

for i in range(len(final_train_df)):
    row_obj=final_train_df.iloc[i]
    fname=row_obj[0]
    label=row_obj[1]
    bbx1=row_obj[2]
    bbx2=row_obj[3]
    bbx3=row_obj[4]
    bbx4=row_obj[5]

    #appending values to list
    trainImagePaths.append(fname)
    trainLabels.append(label)

    ## operations on images and bounding boxes
    org_img=cv2.imread(fname)
    (h,w)=org_img.shape[:2]
    startX=float(bbx1)/w
    startY=float(bbx2)/h
    endX=float(bbx3)/w
    endY=float(bbx4)/h
    trainBboxes.append((startX,startY,endX,endY))

    trainData[i]=cv2.resize(org_img, dsize=(224, 224), interpolation=cv2.INTER_AREA)


trainData=np.array(trainData, dtype='float32')/255.0
trainLabels=np.array(trainLabels)
trainBboxes=np.array(trainBboxes, dtype='float32')
trainImagePaths=np.array(trainImagePaths)

np.save('Car Images/trainData.npy', trainData)
np.save('Car Images/trainLabels.npy', trainLabels)
np.save('Car Images/trainBboxes.npy', trainBboxes)
np.save('Car Images/trainImagePaths.npy', trainImagePaths)
In [ ]:
## performing one hot encoding on the labels
trainLabels_lb=lb.fit_transform(trainLabels)
In [ ]:
trainData.shape, trainBboxes.shape, trainLabels_lb.shape
Out[ ]:
((16288, 224, 224, 3), (16288, 4), (16288, 196))
In [ ]:
#For Test data set
test_file_names=[]
test_class_ids=[]
test_bboxes=[]
test_labels=[]

for i in range(len(annot_test_df)):
    annotations=annot_test_df.iloc[i]

    test_bboxs_x1=annotations[1]
    test_bboxs_x2=annotations[2]
    test_bboxs_x3=annotations[3]
    test_bboxs_x4=annotations[4]
    test_bboxes.append((test_bboxs_x1,test_bboxs_x2,test_bboxs_x3,test_bboxs_x4))

    test_class_id=annotations[5]
    test_labels.append('%04d' %(test_class_id,))

    test_file_names.append(annotations[0])

    test_class_ids.append(annotations[5])

## Comparing between file names, coordinates and their labels
print(test_file_names[:5])
print(test_bboxes[:5])
print(test_class_ids[:5])
print(test_labels[:5])
['00001.jpg', '00002.jpg', '00003.jpg', '00004.jpg', '00005.jpg']
[(30, 52, 246, 147), (100, 19, 576, 203), (51, 105, 968, 659), (67, 84, 581, 407), (140, 151, 593, 339)]
[181, 103, 145, 187, 185]
['0181', '0103', '0145', '0187', '0185']
In [ ]:
annot_test_df.shape
Out[ ]:
(8041, 6)
In [ ]:
# Scaling test dataset
testImagePaths=[]
testBboxes =[]
testLabels=[]

testData= np.zeros((len(annot_test_df), 224, 224,3), dtype=np.float32)

test_folder=glob.glob('Car Images/Test Images/*/*')
for j in range(len(test_folder)):
    index=test_file_names.index(test_folder[j].split('/')[-1])

    (x1,y1,x2,y2)=test_bboxes[index]
    test_filename=test_folder[j].split('/')[-1]
    classname=test_folder[j].split('/')[-2]
    test_file_path=os.path.join('Car Images/Test Images/'+classname+'/'+test_filename)

    testImagePaths.append(test_file_path)
    testLabels.append(classname)

    # Resizing the coordinates
    originalImage=cv2.imread(test_file_path)
    (h,w)=originalImage.shape[:2]
    startX=float(x1)/w
    startY=float(y1)/h
    endX=float(x2)/w
    endY=float(y2)/h
    testBboxes.append((startX,startY,endX,endY))
    testData[j] = cv2.resize(originalImage, dsize=(224, 224),interpolation=cv2.INTER_AREA)

testData=np.array(testData, dtype='float32')/255.0
testLabels=np.array(testLabels)
testBboxes=np.array(testBboxes, dtype='float32')
testImagePaths=np.array(testImagePaths)

np.save('Car Images/testData.npy', testData)
np.save('Car Images/testLabels.npy', testLabels)
np.save('Car Images/testBboxes.npy', testBboxes)
np.save('Car Images/testImagePaths.npy', testImagePaths)
In [ ]:
## performing one hot encoding on the labels
testLabels_lb=lb.fit_transform(testLabels)
In [ ]:
testData.shape, testBboxes.shape, testLabels_lb.shape
Out[ ]:
((8041, 224, 224, 3), (8041, 4), (8041, 196))
In [ ]:
backend.clear_session()
tf.random.set_seed(random_state)

mobile_net_model_new = MobileNetV2(input_shape=(224,224,3), include_top=False, weights='imagenet')

for layer in mobile_net_model_new.layers[:70]:
    layer.trainable=False
for layer in mobile_net_model_new.layers[70:]:
    layer.trainable=True

flatten=Flatten()(mobile_net_model_new.output)
In [ ]:
# construct a fully-connected layer header to output the predicted bounding box coordinates
bboxHead = Dense(128, activation="relu")(flatten)
bboxHead = Dense(64, activation="relu")(bboxHead)
bboxHead = Dense(32, activation="relu")(bboxHead)
bboxHead = Dense(4, activation="sigmoid",name="bounding_box")(bboxHead)

# construct a second fully-connected layer head, this one to predict the class label
softmaxHead = Dense(512, activation="relu")(flatten)
softmaxHead = Dense(256, activation="relu")(softmaxHead)
softmaxHead = Dense(128, activation="relu")(softmaxHead)
softmaxHead = Dense(196, activation="softmax",name="class_label")(softmaxHead)
In [ ]:
# construct a dictionary for our target training outputs
trainTargets = {
	"class_label": trainLabels_lb,
	"bounding_box": trainBboxes
}

# construct a second dictionary, this one for our target testing outputs
testTargets = {
	"class_label": testLabels_lb,
	"bounding_box": testBboxes
}
In [ ]:
# define a dictionary to set the loss methods -- categorical cross-entropy for the class label head and mean absolute error for the bounding box head
losses = {
	"class_label": categorical_crossentropy,
	"bounding_box": "mean_squared_error",
}

# define a dictionary that specifies the weights per loss (both the  class label and bounding box outputs will receive equal weight)
lossWeights = {
	"class_label": 1.0,
	"bounding_box": 1.0
}
In [ ]:
# put together our model which accept an input image and then output bounding box coordinates and a class label
model_mobile_net_bb = Model(inputs=mobile_net_model_new.input, outputs=(bboxHead, softmaxHead))

adam = Adam(learning_rate=0.001, beta_1=0.9, beta_2=0.999, decay=0.001)

model_mobile_net_bb.compile(optimizer=adam, loss= losses, metrics=['accuracy'],loss_weights=lossWeights)

## Looking into our base model
model_mobile_net_bb.summary()
Model: "model_3"
__________________________________________________________________________________________________
 Layer (type)                   Output Shape         Param #     Connected to                     
==================================================================================================
 input_1 (InputLayer)           [(None, 224, 224, 3  0           []                               
                                )]                                                                
                                                                                                  
 Conv1 (Conv2D)                 (None, 112, 112, 32  864         ['input_1[0][0]']                
                                )                                                                 
                                                                                                  
 bn_Conv1 (BatchNormalization)  (None, 112, 112, 32  128         ['Conv1[0][0]']                  
                                )                                                                 
                                                                                                  
 Conv1_relu (ReLU)              (None, 112, 112, 32  0           ['bn_Conv1[0][0]']               
                                )                                                                 
                                                                                                  
 expanded_conv_depthwise (Depth  (None, 112, 112, 32  288        ['Conv1_relu[0][0]']             
 wiseConv2D)                    )                                                                 
                                                                                                  
 expanded_conv_depthwise_BN (Ba  (None, 112, 112, 32  128        ['expanded_conv_depthwise[0][0]']
 tchNormalization)              )                                                                 
                                                                                                  
 expanded_conv_depthwise_relu (  (None, 112, 112, 32  0          ['expanded_conv_depthwise_BN[0][0
 ReLU)                          )                                ]']                              
                                                                                                  
 expanded_conv_project (Conv2D)  (None, 112, 112, 16  512        ['expanded_conv_depthwise_relu[0]
                                )                                [0]']                            
                                                                                                  
 expanded_conv_project_BN (Batc  (None, 112, 112, 16  64         ['expanded_conv_project[0][0]']  
 hNormalization)                )                                                                 
                                                                                                  
 block_1_expand (Conv2D)        (None, 112, 112, 96  1536        ['expanded_conv_project_BN[0][0]'
                                )                                ]                                
                                                                                                  
 block_1_expand_BN (BatchNormal  (None, 112, 112, 96  384        ['block_1_expand[0][0]']         
 ization)                       )                                                                 
                                                                                                  
 block_1_expand_relu (ReLU)     (None, 112, 112, 96  0           ['block_1_expand_BN[0][0]']      
                                )                                                                 
                                                                                                  
 block_1_pad (ZeroPadding2D)    (None, 113, 113, 96  0           ['block_1_expand_relu[0][0]']    
                                )                                                                 
                                                                                                  
 block_1_depthwise (DepthwiseCo  (None, 56, 56, 96)  864         ['block_1_pad[0][0]']            
 nv2D)                                                                                            
                                                                                                  
 block_1_depthwise_BN (BatchNor  (None, 56, 56, 96)  384         ['block_1_depthwise[0][0]']      
 malization)                                                                                      
                                                                                                  
 block_1_depthwise_relu (ReLU)  (None, 56, 56, 96)   0           ['block_1_depthwise_BN[0][0]']   
                                                                                                  
 block_1_project (Conv2D)       (None, 56, 56, 24)   2304        ['block_1_depthwise_relu[0][0]'] 
                                                                                                  
 block_1_project_BN (BatchNorma  (None, 56, 56, 24)  96          ['block_1_project[0][0]']        
 lization)                                                                                        
                                                                                                  
 block_2_expand (Conv2D)        (None, 56, 56, 144)  3456        ['block_1_project_BN[0][0]']     
                                                                                                  
 block_2_expand_BN (BatchNormal  (None, 56, 56, 144)  576        ['block_2_expand[0][0]']         
 ization)                                                                                         
                                                                                                  
 block_2_expand_relu (ReLU)     (None, 56, 56, 144)  0           ['block_2_expand_BN[0][0]']      
                                                                                                  
 block_2_depthwise (DepthwiseCo  (None, 56, 56, 144)  1296       ['block_2_expand_relu[0][0]']    
 nv2D)                                                                                            
                                                                                                  
 block_2_depthwise_BN (BatchNor  (None, 56, 56, 144)  576        ['block_2_depthwise[0][0]']      
 malization)                                                                                      
                                                                                                  
 block_2_depthwise_relu (ReLU)  (None, 56, 56, 144)  0           ['block_2_depthwise_BN[0][0]']   
                                                                                                  
 block_2_project (Conv2D)       (None, 56, 56, 24)   3456        ['block_2_depthwise_relu[0][0]'] 
                                                                                                  
 block_2_project_BN (BatchNorma  (None, 56, 56, 24)  96          ['block_2_project[0][0]']        
 lization)                                                                                        
                                                                                                  
 block_2_add (Add)              (None, 56, 56, 24)   0           ['block_1_project_BN[0][0]',     
                                                                  'block_2_project_BN[0][0]']     
                                                                                                  
 block_3_expand (Conv2D)        (None, 56, 56, 144)  3456        ['block_2_add[0][0]']            
                                                                                                  
 block_3_expand_BN (BatchNormal  (None, 56, 56, 144)  576        ['block_3_expand[0][0]']         
 ization)                                                                                         
                                                                                                  
 block_3_expand_relu (ReLU)     (None, 56, 56, 144)  0           ['block_3_expand_BN[0][0]']      
                                                                                                  
 block_3_pad (ZeroPadding2D)    (None, 57, 57, 144)  0           ['block_3_expand_relu[0][0]']    
                                                                                                  
 block_3_depthwise (DepthwiseCo  (None, 28, 28, 144)  1296       ['block_3_pad[0][0]']            
 nv2D)                                                                                            
                                                                                                  
 block_3_depthwise_BN (BatchNor  (None, 28, 28, 144)  576        ['block_3_depthwise[0][0]']      
 malization)                                                                                      
                                                                                                  
 block_3_depthwise_relu (ReLU)  (None, 28, 28, 144)  0           ['block_3_depthwise_BN[0][0]']   
                                                                                                  
 block_3_project (Conv2D)       (None, 28, 28, 32)   4608        ['block_3_depthwise_relu[0][0]'] 
                                                                                                  
 block_3_project_BN (BatchNorma  (None, 28, 28, 32)  128         ['block_3_project[0][0]']        
 lization)                                                                                        
                                                                                                  
 block_4_expand (Conv2D)        (None, 28, 28, 192)  6144        ['block_3_project_BN[0][0]']     
                                                                                                  
 block_4_expand_BN (BatchNormal  (None, 28, 28, 192)  768        ['block_4_expand[0][0]']         
 ization)                                                                                         
                                                                                                  
 block_4_expand_relu (ReLU)     (None, 28, 28, 192)  0           ['block_4_expand_BN[0][0]']      
                                                                                                  
 block_4_depthwise (DepthwiseCo  (None, 28, 28, 192)  1728       ['block_4_expand_relu[0][0]']    
 nv2D)                                                                                            
                                                                                                  
 block_4_depthwise_BN (BatchNor  (None, 28, 28, 192)  768        ['block_4_depthwise[0][0]']      
 malization)                                                                                      
                                                                                                  
 block_4_depthwise_relu (ReLU)  (None, 28, 28, 192)  0           ['block_4_depthwise_BN[0][0]']   
                                                                                                  
 block_4_project (Conv2D)       (None, 28, 28, 32)   6144        ['block_4_depthwise_relu[0][0]'] 
                                                                                                  
 block_4_project_BN (BatchNorma  (None, 28, 28, 32)  128         ['block_4_project[0][0]']        
 lization)                                                                                        
                                                                                                  
 block_4_add (Add)              (None, 28, 28, 32)   0           ['block_3_project_BN[0][0]',     
                                                                  'block_4_project_BN[0][0]']     
                                                                                                  
 block_5_expand (Conv2D)        (None, 28, 28, 192)  6144        ['block_4_add[0][0]']            
                                                                                                  
 block_5_expand_BN (BatchNormal  (None, 28, 28, 192)  768        ['block_5_expand[0][0]']         
 ization)                                                                                         
                                                                                                  
 block_5_expand_relu (ReLU)     (None, 28, 28, 192)  0           ['block_5_expand_BN[0][0]']      
                                                                                                  
 block_5_depthwise (DepthwiseCo  (None, 28, 28, 192)  1728       ['block_5_expand_relu[0][0]']    
 nv2D)                                                                                            
                                                                                                  
 block_5_depthwise_BN (BatchNor  (None, 28, 28, 192)  768        ['block_5_depthwise[0][0]']      
 malization)                                                                                      
                                                                                                  
 block_5_depthwise_relu (ReLU)  (None, 28, 28, 192)  0           ['block_5_depthwise_BN[0][0]']   
                                                                                                  
 block_5_project (Conv2D)       (None, 28, 28, 32)   6144        ['block_5_depthwise_relu[0][0]'] 
                                                                                                  
 block_5_project_BN (BatchNorma  (None, 28, 28, 32)  128         ['block_5_project[0][0]']        
 lization)                                                                                        
                                                                                                  
 block_5_add (Add)              (None, 28, 28, 32)   0           ['block_4_add[0][0]',            
                                                                  'block_5_project_BN[0][0]']     
                                                                                                  
 block_6_expand (Conv2D)        (None, 28, 28, 192)  6144        ['block_5_add[0][0]']            
                                                                                                  
 block_6_expand_BN (BatchNormal  (None, 28, 28, 192)  768        ['block_6_expand[0][0]']         
 ization)                                                                                         
                                                                                                  
 block_6_expand_relu (ReLU)     (None, 28, 28, 192)  0           ['block_6_expand_BN[0][0]']      
                                                                                                  
 block_6_pad (ZeroPadding2D)    (None, 29, 29, 192)  0           ['block_6_expand_relu[0][0]']    
                                                                                                  
 block_6_depthwise (DepthwiseCo  (None, 14, 14, 192)  1728       ['block_6_pad[0][0]']            
 nv2D)                                                                                            
                                                                                                  
 block_6_depthwise_BN (BatchNor  (None, 14, 14, 192)  768        ['block_6_depthwise[0][0]']      
 malization)                                                                                      
                                                                                                  
 block_6_depthwise_relu (ReLU)  (None, 14, 14, 192)  0           ['block_6_depthwise_BN[0][0]']   
                                                                                                  
 block_6_project (Conv2D)       (None, 14, 14, 64)   12288       ['block_6_depthwise_relu[0][0]'] 
                                                                                                  
 block_6_project_BN (BatchNorma  (None, 14, 14, 64)  256         ['block_6_project[0][0]']        
 lization)                                                                                        
                                                                                                  
 block_7_expand (Conv2D)        (None, 14, 14, 384)  24576       ['block_6_project_BN[0][0]']     
                                                                                                  
 block_7_expand_BN (BatchNormal  (None, 14, 14, 384)  1536       ['block_7_expand[0][0]']         
 ization)                                                                                         
                                                                                                  
 block_7_expand_relu (ReLU)     (None, 14, 14, 384)  0           ['block_7_expand_BN[0][0]']      
                                                                                                  
 block_7_depthwise (DepthwiseCo  (None, 14, 14, 384)  3456       ['block_7_expand_relu[0][0]']    
 nv2D)                                                                                            
                                                                                                  
 block_7_depthwise_BN (BatchNor  (None, 14, 14, 384)  1536       ['block_7_depthwise[0][0]']      
 malization)                                                                                      
                                                                                                  
 block_7_depthwise_relu (ReLU)  (None, 14, 14, 384)  0           ['block_7_depthwise_BN[0][0]']   
                                                                                                  
 block_7_project (Conv2D)       (None, 14, 14, 64)   24576       ['block_7_depthwise_relu[0][0]'] 
                                                                                                  
 block_7_project_BN (BatchNorma  (None, 14, 14, 64)  256         ['block_7_project[0][0]']        
 lization)                                                                                        
                                                                                                  
 block_7_add (Add)              (None, 14, 14, 64)   0           ['block_6_project_BN[0][0]',     
                                                                  'block_7_project_BN[0][0]']     
                                                                                                  
 block_8_expand (Conv2D)        (None, 14, 14, 384)  24576       ['block_7_add[0][0]']            
                                                                                                  
 block_8_expand_BN (BatchNormal  (None, 14, 14, 384)  1536       ['block_8_expand[0][0]']         
 ization)                                                                                         
                                                                                                  
 block_8_expand_relu (ReLU)     (None, 14, 14, 384)  0           ['block_8_expand_BN[0][0]']      
                                                                                                  
 block_8_depthwise (DepthwiseCo  (None, 14, 14, 384)  3456       ['block_8_expand_relu[0][0]']    
 nv2D)                                                                                            
                                                                                                  
 block_8_depthwise_BN (BatchNor  (None, 14, 14, 384)  1536       ['block_8_depthwise[0][0]']      
 malization)                                                                                      
                                                                                                  
 block_8_depthwise_relu (ReLU)  (None, 14, 14, 384)  0           ['block_8_depthwise_BN[0][0]']   
                                                                                                  
 block_8_project (Conv2D)       (None, 14, 14, 64)   24576       ['block_8_depthwise_relu[0][0]'] 
                                                                                                  
 block_8_project_BN (BatchNorma  (None, 14, 14, 64)  256         ['block_8_project[0][0]']        
 lization)                                                                                        
                                                                                                  
 block_8_add (Add)              (None, 14, 14, 64)   0           ['block_7_add[0][0]',            
                                                                  'block_8_project_BN[0][0]']     
                                                                                                  
 block_9_expand (Conv2D)        (None, 14, 14, 384)  24576       ['block_8_add[0][0]']            
                                                                                                  
 block_9_expand_BN (BatchNormal  (None, 14, 14, 384)  1536       ['block_9_expand[0][0]']         
 ization)                                                                                         
                                                                                                  
 block_9_expand_relu (ReLU)     (None, 14, 14, 384)  0           ['block_9_expand_BN[0][0]']      
                                                                                                  
 block_9_depthwise (DepthwiseCo  (None, 14, 14, 384)  3456       ['block_9_expand_relu[0][0]']    
 nv2D)                                                                                            
                                                                                                  
 block_9_depthwise_BN (BatchNor  (None, 14, 14, 384)  1536       ['block_9_depthwise[0][0]']      
 malization)                                                                                      
                                                                                                  
 block_9_depthwise_relu (ReLU)  (None, 14, 14, 384)  0           ['block_9_depthwise_BN[0][0]']   
                                                                                                  
 block_9_project (Conv2D)       (None, 14, 14, 64)   24576       ['block_9_depthwise_relu[0][0]'] 
                                                                                                  
 block_9_project_BN (BatchNorma  (None, 14, 14, 64)  256         ['block_9_project[0][0]']        
 lization)                                                                                        
                                                                                                  
 block_9_add (Add)              (None, 14, 14, 64)   0           ['block_8_add[0][0]',            
                                                                  'block_9_project_BN[0][0]']     
                                                                                                  
 block_10_expand (Conv2D)       (None, 14, 14, 384)  24576       ['block_9_add[0][0]']            
                                                                                                  
 block_10_expand_BN (BatchNorma  (None, 14, 14, 384)  1536       ['block_10_expand[0][0]']        
 lization)                                                                                        
                                                                                                  
 block_10_expand_relu (ReLU)    (None, 14, 14, 384)  0           ['block_10_expand_BN[0][0]']     
                                                                                                  
 block_10_depthwise (DepthwiseC  (None, 14, 14, 384)  3456       ['block_10_expand_relu[0][0]']   
 onv2D)                                                                                           
                                                                                                  
 block_10_depthwise_BN (BatchNo  (None, 14, 14, 384)  1536       ['block_10_depthwise[0][0]']     
 rmalization)                                                                                     
                                                                                                  
 block_10_depthwise_relu (ReLU)  (None, 14, 14, 384)  0          ['block_10_depthwise_BN[0][0]']  
                                                                                                  
 block_10_project (Conv2D)      (None, 14, 14, 96)   36864       ['block_10_depthwise_relu[0][0]']
                                                                                                  
 block_10_project_BN (BatchNorm  (None, 14, 14, 96)  384         ['block_10_project[0][0]']       
 alization)                                                                                       
                                                                                                  
 block_11_expand (Conv2D)       (None, 14, 14, 576)  55296       ['block_10_project_BN[0][0]']    
                                                                                                  
 block_11_expand_BN (BatchNorma  (None, 14, 14, 576)  2304       ['block_11_expand[0][0]']        
 lization)                                                                                        
                                                                                                  
 block_11_expand_relu (ReLU)    (None, 14, 14, 576)  0           ['block_11_expand_BN[0][0]']     
                                                                                                  
 block_11_depthwise (DepthwiseC  (None, 14, 14, 576)  5184       ['block_11_expand_relu[0][0]']   
 onv2D)                                                                                           
                                                                                                  
 block_11_depthwise_BN (BatchNo  (None, 14, 14, 576)  2304       ['block_11_depthwise[0][0]']     
 rmalization)                                                                                     
                                                                                                  
 block_11_depthwise_relu (ReLU)  (None, 14, 14, 576)  0          ['block_11_depthwise_BN[0][0]']  
                                                                                                  
 block_11_project (Conv2D)      (None, 14, 14, 96)   55296       ['block_11_depthwise_relu[0][0]']
                                                                                                  
 block_11_project_BN (BatchNorm  (None, 14, 14, 96)  384         ['block_11_project[0][0]']       
 alization)                                                                                       
                                                                                                  
 block_11_add (Add)             (None, 14, 14, 96)   0           ['block_10_project_BN[0][0]',    
                                                                  'block_11_project_BN[0][0]']    
                                                                                                  
 block_12_expand (Conv2D)       (None, 14, 14, 576)  55296       ['block_11_add[0][0]']           
                                                                                                  
 block_12_expand_BN (BatchNorma  (None, 14, 14, 576)  2304       ['block_12_expand[0][0]']        
 lization)                                                                                        
                                                                                                  
 block_12_expand_relu (ReLU)    (None, 14, 14, 576)  0           ['block_12_expand_BN[0][0]']     
                                                                                                  
 block_12_depthwise (DepthwiseC  (None, 14, 14, 576)  5184       ['block_12_expand_relu[0][0]']   
 onv2D)                                                                                           
                                                                                                  
 block_12_depthwise_BN (BatchNo  (None, 14, 14, 576)  2304       ['block_12_depthwise[0][0]']     
 rmalization)                                                                                     
                                                                                                  
 block_12_depthwise_relu (ReLU)  (None, 14, 14, 576)  0          ['block_12_depthwise_BN[0][0]']  
                                                                                                  
 block_12_project (Conv2D)      (None, 14, 14, 96)   55296       ['block_12_depthwise_relu[0][0]']
                                                                                                  
 block_12_project_BN (BatchNorm  (None, 14, 14, 96)  384         ['block_12_project[0][0]']       
 alization)                                                                                       
                                                                                                  
 block_12_add (Add)             (None, 14, 14, 96)   0           ['block_11_add[0][0]',           
                                                                  'block_12_project_BN[0][0]']    
                                                                                                  
 block_13_expand (Conv2D)       (None, 14, 14, 576)  55296       ['block_12_add[0][0]']           
                                                                                                  
 block_13_expand_BN (BatchNorma  (None, 14, 14, 576)  2304       ['block_13_expand[0][0]']        
 lization)                                                                                        
                                                                                                  
 block_13_expand_relu (ReLU)    (None, 14, 14, 576)  0           ['block_13_expand_BN[0][0]']     
                                                                                                  
 block_13_pad (ZeroPadding2D)   (None, 15, 15, 576)  0           ['block_13_expand_relu[0][0]']   
                                                                                                  
 block_13_depthwise (DepthwiseC  (None, 7, 7, 576)   5184        ['block_13_pad[0][0]']           
 onv2D)                                                                                           
                                                                                                  
 block_13_depthwise_BN (BatchNo  (None, 7, 7, 576)   2304        ['block_13_depthwise[0][0]']     
 rmalization)                                                                                     
                                                                                                  
 block_13_depthwise_relu (ReLU)  (None, 7, 7, 576)   0           ['block_13_depthwise_BN[0][0]']  
                                                                                                  
 block_13_project (Conv2D)      (None, 7, 7, 160)    92160       ['block_13_depthwise_relu[0][0]']
                                                                                                  
 block_13_project_BN (BatchNorm  (None, 7, 7, 160)   640         ['block_13_project[0][0]']       
 alization)                                                                                       
                                                                                                  
 block_14_expand (Conv2D)       (None, 7, 7, 960)    153600      ['block_13_project_BN[0][0]']    
                                                                                                  
 block_14_expand_BN (BatchNorma  (None, 7, 7, 960)   3840        ['block_14_expand[0][0]']        
 lization)                                                                                        
                                                                                                  
 block_14_expand_relu (ReLU)    (None, 7, 7, 960)    0           ['block_14_expand_BN[0][0]']     
                                                                                                  
 block_14_depthwise (DepthwiseC  (None, 7, 7, 960)   8640        ['block_14_expand_relu[0][0]']   
 onv2D)                                                                                           
                                                                                                  
 block_14_depthwise_BN (BatchNo  (None, 7, 7, 960)   3840        ['block_14_depthwise[0][0]']     
 rmalization)                                                                                     
                                                                                                  
 block_14_depthwise_relu (ReLU)  (None, 7, 7, 960)   0           ['block_14_depthwise_BN[0][0]']  
                                                                                                  
 block_14_project (Conv2D)      (None, 7, 7, 160)    153600      ['block_14_depthwise_relu[0][0]']
                                                                                                  
 block_14_project_BN (BatchNorm  (None, 7, 7, 160)   640         ['block_14_project[0][0]']       
 alization)                                                                                       
                                                                                                  
 block_14_add (Add)             (None, 7, 7, 160)    0           ['block_13_project_BN[0][0]',    
                                                                  'block_14_project_BN[0][0]']    
                                                                                                  
 block_15_expand (Conv2D)       (None, 7, 7, 960)    153600      ['block_14_add[0][0]']           
                                                                                                  
 block_15_expand_BN (BatchNorma  (None, 7, 7, 960)   3840        ['block_15_expand[0][0]']        
 lization)                                                                                        
                                                                                                  
 block_15_expand_relu (ReLU)    (None, 7, 7, 960)    0           ['block_15_expand_BN[0][0]']     
                                                                                                  
 block_15_depthwise (DepthwiseC  (None, 7, 7, 960)   8640        ['block_15_expand_relu[0][0]']   
 onv2D)                                                                                           
                                                                                                  
 block_15_depthwise_BN (BatchNo  (None, 7, 7, 960)   3840        ['block_15_depthwise[0][0]']     
 rmalization)                                                                                     
                                                                                                  
 block_15_depthwise_relu (ReLU)  (None, 7, 7, 960)   0           ['block_15_depthwise_BN[0][0]']  
                                                                                                  
 block_15_project (Conv2D)      (None, 7, 7, 160)    153600      ['block_15_depthwise_relu[0][0]']
                                                                                                  
 block_15_project_BN (BatchNorm  (None, 7, 7, 160)   640         ['block_15_project[0][0]']       
 alization)                                                                                       
                                                                                                  
 block_15_add (Add)             (None, 7, 7, 160)    0           ['block_14_add[0][0]',           
                                                                  'block_15_project_BN[0][0]']    
                                                                                                  
 block_16_expand (Conv2D)       (None, 7, 7, 960)    153600      ['block_15_add[0][0]']           
                                                                                                  
 block_16_expand_BN (BatchNorma  (None, 7, 7, 960)   3840        ['block_16_expand[0][0]']        
 lization)                                                                                        
                                                                                                  
 block_16_expand_relu (ReLU)    (None, 7, 7, 960)    0           ['block_16_expand_BN[0][0]']     
                                                                                                  
 block_16_depthwise (DepthwiseC  (None, 7, 7, 960)   8640        ['block_16_expand_relu[0][0]']   
 onv2D)                                                                                           
                                                                                                  
 block_16_depthwise_BN (BatchNo  (None, 7, 7, 960)   3840        ['block_16_depthwise[0][0]']     
 rmalization)                                                                                     
                                                                                                  
 block_16_depthwise_relu (ReLU)  (None, 7, 7, 960)   0           ['block_16_depthwise_BN[0][0]']  
                                                                                                  
 block_16_project (Conv2D)      (None, 7, 7, 320)    307200      ['block_16_depthwise_relu[0][0]']
                                                                                                  
 block_16_project_BN (BatchNorm  (None, 7, 7, 320)   1280        ['block_16_project[0][0]']       
 alization)                                                                                       
                                                                                                  
 Conv_1 (Conv2D)                (None, 7, 7, 1280)   409600      ['block_16_project_BN[0][0]']    
                                                                                                  
 Conv_1_bn (BatchNormalization)  (None, 7, 7, 1280)  5120        ['Conv_1[0][0]']                 
                                                                                                  
 out_relu (ReLU)                (None, 7, 7, 1280)   0           ['Conv_1_bn[0][0]']              
                                                                                                  
 flatten (Flatten)              (None, 62720)        0           ['out_relu[0][0]']               
                                                                                                  
 dense (Dense)                  (None, 128)          8028288     ['flatten[0][0]']                
                                                                                                  
 dense_3 (Dense)                (None, 512)          32113152    ['flatten[0][0]']                
                                                                                                  
 dense_1 (Dense)                (None, 64)           8256        ['dense[0][0]']                  
                                                                                                  
 dense_4 (Dense)                (None, 256)          131328      ['dense_3[0][0]']                
                                                                                                  
 dense_2 (Dense)                (None, 32)           2080        ['dense_1[0][0]']                
                                                                                                  
 dense_5 (Dense)                (None, 128)          32896       ['dense_4[0][0]']                
                                                                                                  
 bounding_box (Dense)           (None, 4)            132         ['dense_2[0][0]']                
                                                                                                  
 class_label (Dense)            (None, 196)          25284       ['dense_5[0][0]']                
                                                                                                  
==================================================================================================
Total params: 42,599,400
Trainable params: 42,434,600
Non-trainable params: 164,800
__________________________________________________________________________________________________
In [ ]:
visualkeras_view(model_mobile_net_bb)
Out[ ]:
In [ ]:
# Fit the model
model_mobile_net_bb_history = model_mobile_net_bb.fit(trainData, trainTargets, validation_data=(testData, testTargets), epochs=50, batch_size=50, verbose=1, callbacks=create_callbacks('model_mobile_net_bb'))
Epoch 1/50
326/326 [==============================] - ETA: 0s - loss: 4.2079 - bounding_box_loss: 0.0101 - class_label_loss: 4.1978 - bounding_box_accuracy: 0.7175 - class_label_accuracy: 0.1092
Epoch 1: saving model to model_mobile_net_bb_weights.h5
326/326 [==============================] - 889s 3s/step - loss: 4.2079 - bounding_box_loss: 0.0101 - class_label_loss: 4.1978 - bounding_box_accuracy: 0.7175 - class_label_accuracy: 0.1092 - val_loss: 6.7061 - val_bounding_box_loss: 0.0273 - val_class_label_loss: 6.6788 - val_bounding_box_accuracy: 0.6978 - val_class_label_accuracy: 0.0199 - lr: 0.0010
Epoch 2/50
326/326 [==============================] - ETA: 0s - loss: 2.5983 - bounding_box_loss: 0.0050 - class_label_loss: 2.5933 - bounding_box_accuracy: 0.7860 - class_label_accuracy: 0.3262
Epoch 2: saving model to model_mobile_net_bb_weights.h5
326/326 [==============================] - 2563s 8s/step - loss: 2.5983 - bounding_box_loss: 0.0050 - class_label_loss: 2.5933 - bounding_box_accuracy: 0.7860 - class_label_accuracy: 0.3262 - val_loss: 6.0115 - val_bounding_box_loss: 0.0193 - val_class_label_loss: 5.9922 - val_bounding_box_accuracy: 0.7268 - val_class_label_accuracy: 0.0849 - lr: 0.0010
Epoch 3/50
326/326 [==============================] - ETA: 0s - loss: 1.5936 - bounding_box_loss: 0.0036 - class_label_loss: 1.5900 - bounding_box_accuracy: 0.8021 - class_label_accuracy: 0.5508
Epoch 3: saving model to model_mobile_net_bb_weights.h5
326/326 [==============================] - 867s 3s/step - loss: 1.5936 - bounding_box_loss: 0.0036 - class_label_loss: 1.5900 - bounding_box_accuracy: 0.8021 - class_label_accuracy: 0.5508 - val_loss: 7.7603 - val_bounding_box_loss: 0.0166 - val_class_label_loss: 7.7437 - val_bounding_box_accuracy: 0.7728 - val_class_label_accuracy: 0.1252 - lr: 0.0010
Epoch 4/50
326/326 [==============================] - ETA: 0s - loss: 0.9718 - bounding_box_loss: 0.0031 - class_label_loss: 0.9687 - bounding_box_accuracy: 0.8161 - class_label_accuracy: 0.7145
Epoch 4: saving model to model_mobile_net_bb_weights.h5
326/326 [==============================] - 864s 3s/step - loss: 0.9718 - bounding_box_loss: 0.0031 - class_label_loss: 0.9687 - bounding_box_accuracy: 0.8161 - class_label_accuracy: 0.7145 - val_loss: 5.5538 - val_bounding_box_loss: 0.0087 - val_class_label_loss: 5.5451 - val_bounding_box_accuracy: 0.7897 - val_class_label_accuracy: 0.2296 - lr: 0.0010
Epoch 5/50
326/326 [==============================] - ETA: 0s - loss: 0.5894 - bounding_box_loss: 0.0025 - class_label_loss: 0.5869 - bounding_box_accuracy: 0.8178 - class_label_accuracy: 0.8187
Epoch 5: saving model to model_mobile_net_bb_weights.h5
326/326 [==============================] - 858s 3s/step - loss: 0.5894 - bounding_box_loss: 0.0025 - class_label_loss: 0.5869 - bounding_box_accuracy: 0.8178 - class_label_accuracy: 0.8187 - val_loss: 3.2162 - val_bounding_box_loss: 0.0062 - val_class_label_loss: 3.2100 - val_bounding_box_accuracy: 0.7928 - val_class_label_accuracy: 0.3978 - lr: 0.0010
Epoch 6/50
326/326 [==============================] - ETA: 0s - loss: 0.3451 - bounding_box_loss: 0.0021 - class_label_loss: 0.3429 - bounding_box_accuracy: 0.8317 - class_label_accuracy: 0.8960
Epoch 6: saving model to model_mobile_net_bb_weights.h5
326/326 [==============================] - 878s 3s/step - loss: 0.3451 - bounding_box_loss: 0.0021 - class_label_loss: 0.3429 - bounding_box_accuracy: 0.8317 - class_label_accuracy: 0.8960 - val_loss: 3.0557 - val_bounding_box_loss: 0.0032 - val_class_label_loss: 3.0526 - val_bounding_box_accuracy: 0.7873 - val_class_label_accuracy: 0.4610 - lr: 0.0010
Epoch 7/50
326/326 [==============================] - ETA: 0s - loss: 0.2038 - bounding_box_loss: 0.0018 - class_label_loss: 0.2020 - bounding_box_accuracy: 0.8340 - class_label_accuracy: 0.9363
Epoch 7: saving model to model_mobile_net_bb_weights.h5
326/326 [==============================] - 874s 3s/step - loss: 0.2038 - bounding_box_loss: 0.0018 - class_label_loss: 0.2020 - bounding_box_accuracy: 0.8340 - class_label_accuracy: 0.9363 - val_loss: 2.3969 - val_bounding_box_loss: 0.0029 - val_class_label_loss: 2.3940 - val_bounding_box_accuracy: 0.8316 - val_class_label_accuracy: 0.5702 - lr: 0.0010
Epoch 8/50
326/326 [==============================] - ETA: 0s - loss: 0.1354 - bounding_box_loss: 0.0016 - class_label_loss: 0.1338 - bounding_box_accuracy: 0.8366 - class_label_accuracy: 0.9581
Epoch 8: saving model to model_mobile_net_bb_weights.h5
326/326 [==============================] - 866s 3s/step - loss: 0.1354 - bounding_box_loss: 0.0016 - class_label_loss: 0.1338 - bounding_box_accuracy: 0.8366 - class_label_accuracy: 0.9581 - val_loss: 2.1989 - val_bounding_box_loss: 0.0023 - val_class_label_loss: 2.1966 - val_bounding_box_accuracy: 0.8392 - val_class_label_accuracy: 0.5902 - lr: 0.0010
Epoch 9/50
326/326 [==============================] - ETA: 0s - loss: 0.0812 - bounding_box_loss: 0.0014 - class_label_loss: 0.0798 - bounding_box_accuracy: 0.8398 - class_label_accuracy: 0.9750
Epoch 9: saving model to model_mobile_net_bb_weights.h5
326/326 [==============================] - 859s 3s/step - loss: 0.0812 - bounding_box_loss: 0.0014 - class_label_loss: 0.0798 - bounding_box_accuracy: 0.8398 - class_label_accuracy: 0.9750 - val_loss: 2.0435 - val_bounding_box_loss: 0.0023 - val_class_label_loss: 2.0412 - val_bounding_box_accuracy: 0.8408 - val_class_label_accuracy: 0.6165 - lr: 0.0010
Epoch 10/50
326/326 [==============================] - ETA: 0s - loss: 0.0693 - bounding_box_loss: 0.0013 - class_label_loss: 0.0681 - bounding_box_accuracy: 0.8445 - class_label_accuracy: 0.9799
Epoch 10: saving model to model_mobile_net_bb_weights.h5
326/326 [==============================] - 879s 3s/step - loss: 0.0693 - bounding_box_loss: 0.0013 - class_label_loss: 0.0681 - bounding_box_accuracy: 0.8445 - class_label_accuracy: 0.9799 - val_loss: 1.8106 - val_bounding_box_loss: 0.0025 - val_class_label_loss: 1.8081 - val_bounding_box_accuracy: 0.8366 - val_class_label_accuracy: 0.6469 - lr: 0.0010
Epoch 11/50
326/326 [==============================] - ETA: 0s - loss: 0.0596 - bounding_box_loss: 0.0011 - class_label_loss: 0.0585 - bounding_box_accuracy: 0.8511 - class_label_accuracy: 0.9826
Epoch 11: saving model to model_mobile_net_bb_weights.h5
326/326 [==============================] - 867s 3s/step - loss: 0.0596 - bounding_box_loss: 0.0011 - class_label_loss: 0.0585 - bounding_box_accuracy: 0.8511 - class_label_accuracy: 0.9826 - val_loss: 2.2723 - val_bounding_box_loss: 0.0022 - val_class_label_loss: 2.2702 - val_bounding_box_accuracy: 0.8478 - val_class_label_accuracy: 0.6216 - lr: 0.0010
Epoch 12/50
326/326 [==============================] - ETA: 0s - loss: 0.0629 - bounding_box_loss: 0.0011 - class_label_loss: 0.0618 - bounding_box_accuracy: 0.8542 - class_label_accuracy: 0.9815
Epoch 12: saving model to model_mobile_net_bb_weights.h5
326/326 [==============================] - 877s 3s/step - loss: 0.0629 - bounding_box_loss: 0.0011 - class_label_loss: 0.0618 - bounding_box_accuracy: 0.8542 - class_label_accuracy: 0.9815 - val_loss: 2.2775 - val_bounding_box_loss: 0.0022 - val_class_label_loss: 2.2754 - val_bounding_box_accuracy: 0.8464 - val_class_label_accuracy: 0.6211 - lr: 0.0010
Epoch 13/50
326/326 [==============================] - ETA: 0s - loss: 0.0465 - bounding_box_loss: 9.9923e-04 - class_label_loss: 0.0455 - bounding_box_accuracy: 0.8527 - class_label_accuracy: 0.9863
Epoch 13: saving model to model_mobile_net_bb_weights.h5
326/326 [==============================] - 875s 3s/step - loss: 0.0465 - bounding_box_loss: 9.9923e-04 - class_label_loss: 0.0455 - bounding_box_accuracy: 0.8527 - class_label_accuracy: 0.9863 - val_loss: 2.4350 - val_bounding_box_loss: 0.0020 - val_class_label_loss: 2.4330 - val_bounding_box_accuracy: 0.8401 - val_class_label_accuracy: 0.6101 - lr: 0.0010
Epoch 14/50
326/326 [==============================] - ETA: 0s - loss: 0.0361 - bounding_box_loss: 9.0862e-04 - class_label_loss: 0.0352 - bounding_box_accuracy: 0.8605 - class_label_accuracy: 0.9893
Epoch 14: saving model to model_mobile_net_bb_weights.h5
326/326 [==============================] - 864s 3s/step - loss: 0.0361 - bounding_box_loss: 9.0862e-04 - class_label_loss: 0.0352 - bounding_box_accuracy: 0.8605 - class_label_accuracy: 0.9893 - val_loss: 2.1622 - val_bounding_box_loss: 0.0019 - val_class_label_loss: 2.1603 - val_bounding_box_accuracy: 0.8495 - val_class_label_accuracy: 0.6259 - lr: 0.0010
Epoch 15/50
326/326 [==============================] - ETA: 0s - loss: 0.0173 - bounding_box_loss: 7.6733e-04 - class_label_loss: 0.0166 - bounding_box_accuracy: 0.8643 - class_label_accuracy: 0.9955
Epoch 15: saving model to model_mobile_net_bb_weights.h5
326/326 [==============================] - 863s 3s/step - loss: 0.0173 - bounding_box_loss: 7.6733e-04 - class_label_loss: 0.0166 - bounding_box_accuracy: 0.8643 - class_label_accuracy: 0.9955 - val_loss: 1.8331 - val_bounding_box_loss: 0.0019 - val_class_label_loss: 1.8312 - val_bounding_box_accuracy: 0.8551 - val_class_label_accuracy: 0.6694 - lr: 1.0000e-04
Epoch 16/50
326/326 [==============================] - ETA: 0s - loss: 0.0080 - bounding_box_loss: 6.9729e-04 - class_label_loss: 0.0073 - bounding_box_accuracy: 0.8653 - class_label_accuracy: 0.9983
Epoch 16: saving model to model_mobile_net_bb_weights.h5
326/326 [==============================] - 859s 3s/step - loss: 0.0080 - bounding_box_loss: 6.9729e-04 - class_label_loss: 0.0073 - bounding_box_accuracy: 0.8653 - class_label_accuracy: 0.9983 - val_loss: 1.7398 - val_bounding_box_loss: 0.0018 - val_class_label_loss: 1.7380 - val_bounding_box_accuracy: 0.8571 - val_class_label_accuracy: 0.6824 - lr: 1.0000e-04
Epoch 17/50
326/326 [==============================] - ETA: 0s - loss: 0.0057 - bounding_box_loss: 6.6173e-04 - class_label_loss: 0.0051 - bounding_box_accuracy: 0.8659 - class_label_accuracy: 0.9987
Epoch 17: saving model to model_mobile_net_bb_weights.h5
326/326 [==============================] - 869s 3s/step - loss: 0.0057 - bounding_box_loss: 6.6173e-04 - class_label_loss: 0.0051 - bounding_box_accuracy: 0.8659 - class_label_accuracy: 0.9987 - val_loss: 1.6903 - val_bounding_box_loss: 0.0018 - val_class_label_loss: 1.6885 - val_bounding_box_accuracy: 0.8581 - val_class_label_accuracy: 0.6866 - lr: 1.0000e-04
Epoch 18/50
326/326 [==============================] - ETA: 0s - loss: 0.0051 - bounding_box_loss: 6.3289e-04 - class_label_loss: 0.0044 - bounding_box_accuracy: 0.8686 - class_label_accuracy: 0.9987
Epoch 18: saving model to model_mobile_net_bb_weights.h5
326/326 [==============================] - 859s 3s/step - loss: 0.0051 - bounding_box_loss: 6.3289e-04 - class_label_loss: 0.0044 - bounding_box_accuracy: 0.8686 - class_label_accuracy: 0.9987 - val_loss: 1.6827 - val_bounding_box_loss: 0.0018 - val_class_label_loss: 1.6809 - val_bounding_box_accuracy: 0.8561 - val_class_label_accuracy: 0.6900 - lr: 1.0000e-04
Epoch 19/50
326/326 [==============================] - ETA: 0s - loss: 0.0047 - bounding_box_loss: 6.0858e-04 - class_label_loss: 0.0041 - bounding_box_accuracy: 0.8671 - class_label_accuracy: 0.9986
Epoch 19: saving model to model_mobile_net_bb_weights.h5
326/326 [==============================] - 872s 3s/step - loss: 0.0047 - bounding_box_loss: 6.0858e-04 - class_label_loss: 0.0041 - bounding_box_accuracy: 0.8671 - class_label_accuracy: 0.9986 - val_loss: 1.6929 - val_bounding_box_loss: 0.0018 - val_class_label_loss: 1.6912 - val_bounding_box_accuracy: 0.8575 - val_class_label_accuracy: 0.6938 - lr: 1.0000e-04
Epoch 20/50
326/326 [==============================] - ETA: 0s - loss: 0.0043 - bounding_box_loss: 5.9574e-04 - class_label_loss: 0.0037 - bounding_box_accuracy: 0.8689 - class_label_accuracy: 0.9986
Epoch 20: saving model to model_mobile_net_bb_weights.h5
326/326 [==============================] - 855s 3s/step - loss: 0.0043 - bounding_box_loss: 5.9574e-04 - class_label_loss: 0.0037 - bounding_box_accuracy: 0.8689 - class_label_accuracy: 0.9986 - val_loss: 1.7029 - val_bounding_box_loss: 0.0018 - val_class_label_loss: 1.7011 - val_bounding_box_accuracy: 0.8598 - val_class_label_accuracy: 0.6937 - lr: 1.0000e-04
Epoch 21/50
326/326 [==============================] - ETA: 0s - loss: 0.0039 - bounding_box_loss: 5.8012e-04 - class_label_loss: 0.0034 - bounding_box_accuracy: 0.8695 - class_label_accuracy: 0.9986
Epoch 21: saving model to model_mobile_net_bb_weights.h5
326/326 [==============================] - 861s 3s/step - loss: 0.0039 - bounding_box_loss: 5.8012e-04 - class_label_loss: 0.0034 - bounding_box_accuracy: 0.8695 - class_label_accuracy: 0.9986 - val_loss: 1.7038 - val_bounding_box_loss: 0.0018 - val_class_label_loss: 1.7021 - val_bounding_box_accuracy: 0.8582 - val_class_label_accuracy: 0.6947 - lr: 1.0000e-04
Epoch 22/50
326/326 [==============================] - ETA: 0s - loss: 0.0036 - bounding_box_loss: 5.6132e-04 - class_label_loss: 0.0031 - bounding_box_accuracy: 0.8696 - class_label_accuracy: 0.9987
Epoch 22: saving model to model_mobile_net_bb_weights.h5
326/326 [==============================] - 856s 3s/step - loss: 0.0036 - bounding_box_loss: 5.6132e-04 - class_label_loss: 0.0031 - bounding_box_accuracy: 0.8696 - class_label_accuracy: 0.9987 - val_loss: 1.7257 - val_bounding_box_loss: 0.0018 - val_class_label_loss: 1.7239 - val_bounding_box_accuracy: 0.8579 - val_class_label_accuracy: 0.6944 - lr: 1.0000e-04
Epoch 23/50
326/326 [==============================] - ETA: 0s - loss: 0.0033 - bounding_box_loss: 5.4880e-04 - class_label_loss: 0.0028 - bounding_box_accuracy: 0.8735 - class_label_accuracy: 0.9988
Epoch 23: saving model to model_mobile_net_bb_weights.h5
326/326 [==============================] - 861s 3s/step - loss: 0.0033 - bounding_box_loss: 5.4880e-04 - class_label_loss: 0.0028 - bounding_box_accuracy: 0.8735 - class_label_accuracy: 0.9988 - val_loss: 1.7232 - val_bounding_box_loss: 0.0017 - val_class_label_loss: 1.7214 - val_bounding_box_accuracy: 0.8587 - val_class_label_accuracy: 0.6958 - lr: 1.0000e-04
Epoch 24/50
326/326 [==============================] - ETA: 0s - loss: 0.0032 - bounding_box_loss: 5.3336e-04 - class_label_loss: 0.0027 - bounding_box_accuracy: 0.8716 - class_label_accuracy: 0.9988
Epoch 24: saving model to model_mobile_net_bb_weights.h5
326/326 [==============================] - 852s 3s/step - loss: 0.0032 - bounding_box_loss: 5.3336e-04 - class_label_loss: 0.0027 - bounding_box_accuracy: 0.8716 - class_label_accuracy: 0.9988 - val_loss: 1.7418 - val_bounding_box_loss: 0.0017 - val_class_label_loss: 1.7400 - val_bounding_box_accuracy: 0.8586 - val_class_label_accuracy: 0.6964 - lr: 1.0000e-04
Epoch 25/50
326/326 [==============================] - ETA: 0s - loss: 0.0033 - bounding_box_loss: 5.2044e-04 - class_label_loss: 0.0028 - bounding_box_accuracy: 0.8744 - class_label_accuracy: 0.9988
Epoch 25: saving model to model_mobile_net_bb_weights.h5
326/326 [==============================] - 855s 3s/step - loss: 0.0033 - bounding_box_loss: 5.2044e-04 - class_label_loss: 0.0028 - bounding_box_accuracy: 0.8744 - class_label_accuracy: 0.9988 - val_loss: 1.7631 - val_bounding_box_loss: 0.0017 - val_class_label_loss: 1.7613 - val_bounding_box_accuracy: 0.8571 - val_class_label_accuracy: 0.6951 - lr: 1.0000e-04
Epoch 26/50
326/326 [==============================] - ETA: 0s - loss: 0.0029 - bounding_box_loss: 5.1107e-04 - class_label_loss: 0.0024 - bounding_box_accuracy: 0.8721 - class_label_accuracy: 0.9988
Epoch 26: saving model to model_mobile_net_bb_weights.h5
326/326 [==============================] - 862s 3s/step - loss: 0.0029 - bounding_box_loss: 5.1107e-04 - class_label_loss: 0.0024 - bounding_box_accuracy: 0.8721 - class_label_accuracy: 0.9988 - val_loss: 1.7712 - val_bounding_box_loss: 0.0017 - val_class_label_loss: 1.7695 - val_bounding_box_accuracy: 0.8596 - val_class_label_accuracy: 0.6958 - lr: 1.0000e-04
In [ ]:
# create an iterator object with write permission - model_mobile_net_bb.pkl
with open('model_mobile_net_bb.pkl', 'wb') as files:
    pickle.dump(model_mobile_net_bb, files)
model_mobile_net_bb.save('model_mobile_net_bb.h5')

# serialize the label binarizer to disk
with open('lb.pkl', 'wb') as files:
    pickle.dump(lb, files)

# serialize the label binarizer to disk
with open('model_mobile_net_bb_history.pkl', 'wb') as files:
    pickle.dump(model_mobile_net_bb_history, files)
In [ ]:
## Drawing plots
# plot the total loss, label loss, and bounding box loss
lossNames = ["loss", "class_label_loss", "bounding_box_loss"]
N = np.arange(0, 26)
plt.style.use("ggplot")
(fig, ax) = plt.subplots(3, 1, figsize=(7, 7))

# loop over the loss names
for (i, l) in enumerate(lossNames):
	# plot the loss for both the training and validation data
	title = "Loss for {}".format(l) if l != "loss" else "Total loss"
	ax[i].set_title(title)
	ax[i].set_xlabel("Epoch #")
	ax[i].set_ylabel("Loss")
	ax[i].plot(N, model_mobile_net_bb_history.history[l], label=l)
	ax[i].plot(N, model_mobile_net_bb_history.history["val_" + l], label="val_" + l)
	ax[i].legend()
fig.tight_layout()

plt.style.use("ggplot")
plt.figure(figsize=(5,3))
plt.plot(N, model_mobile_net_bb_history.history["bounding_box_accuracy"],
	label="bounding_box_train_acc")
plt.plot(N, model_mobile_net_bb_history.history["val_bounding_box_accuracy"],
	label="bounding_box_val_acc")
plt.title("Bounding box Accuracy")
plt.xlabel("Epoch #")
plt.ylabel("Accuracy")
plt.legend(loc="lower right")


plt.style.use("ggplot")
plt.figure(figsize=(5,3))
plt.plot(N, model_mobile_net_bb_history.history["class_label_accuracy"],
	label="class_label_train_acc")
plt.plot(N, model_mobile_net_bb_history.history["val_class_label_accuracy"],
	label="val_class_label_acc")
plt.title("Class Label Accuracy")
plt.xlabel("Epoch #")
plt.ylabel("Accuracy")
plt.legend(loc="lower right")
plt.show()
In [ ]:
def predict(image_path, model):
    img1 = load_img(image_path, target_size=(224, 224))
    img1 = img_to_array(img1) / 255.0
    img1 = np.expand_dims(img1, axis=0)

    car_name=image_path.split('/')[-2]

    (box_pred, label_pred) = model.predict(img1)
    (x1, y1, x2, y2) = box_pred[0]

    i = np.argmax(label_pred, axis=1)
    label = lb.classes_[i][0]

    image2 = cv2.imread(image_path)
    (h, w) = image2.shape[:2]

    # scale the predicted bounding box coordinates based on the image dimensions
    x1 = int(x1 * w)
    y1 = int(y1 * h)
    x2 = int(x2 * w)
    y2 = int(y2 * h)

    # draw the predicted bounding box and class label on the image
    y = y1 - 10 if y1 - 10 > 10 else y1 + 10

    cv2.rectangle(image2, (x1, y1), (x2, y2), (17, 32, 80), 2)

    cv2.putText(image2, label, (x1, y), cv2.FONT_HERSHEY_TRIPLEX, 0.45, (17, 32, 80), 1)

    top5_predictions = np.argsort(label_pred, axis=1)[:,-5:]

    names = []
    for i in range(1,6):
      names += [lb.classes_[top5_predictions[0][-i]]]

    probs = np.sort(label_pred)[:,-5:][0][::-1]

    # Plotting test image and predicted probabilites
    f, ax = plt.subplots(2,figsize = (6,10))

    ax[0].imshow(image2[...,::-1]),ax[0].axis('off'),ax[0].grid(None)
    ax[0].set_title(car_name)

    y_names = np.arange(len(names))
    ax[1].barh(y_names, probs/probs.sum(), color='orange')
    ax[1].set_yticks(y_names)
    ax[1].set_yticklabels(names)
    ax[1].invert_yaxis()
    ax[1].set_title('Top 5 Predictions')

    plt.show()
In [ ]:
predict('Car Images/Test Images/Land Rover LR2 SUV 2012/01116.jpg', model_mobile_net_bb)
1/1 [==============================] - 0s 57ms/step
In [ ]:
predict('Car Images/Test Images/Mercedes-Benz C-Class Sedan 2012/01127.jpg', model_mobile_net_bb)
1/1 [==============================] - 0s 87ms/step
In [ ]: